src/hotspot/share/opto/graphKit.cpp

2989   if (UncommonNullCast               // Cutout for this technique
2990       && obj != null()               // And not the -Xcomp stupid case?
2991       && !too_many_traps(reason)
2992       ) {
2993     if (speculating) {
2994       return true;
2995     }
2996     if (data == nullptr)
2997       // Edge case:  no mature data.  Be optimistic here.
2998       return true;
2999     // If the profile has not seen a null, assume it won't happen.
3000     assert(java_bc() == Bytecodes::_checkcast ||
3001            java_bc() == Bytecodes::_instanceof ||
3002            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3003     return !data->as_BitData()->null_seen();
3004   }
3005   speculating = false;
3006   return false;
3007 }
3008 
3009 void GraphKit::guard_klass_being_initialized(Node* klass) {
3010   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3011   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3012   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3013                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3014                                     T_BYTE, MemNode::acquire);
3015   init_state = _gvn.transform(init_state);
3016 
3017   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3018 
3019   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3020   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3021 
3022   { BuildCutout unless(this, tst, PROB_MAX);
3023     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3024   }
3025 }
3026 
3027 void GraphKit::guard_init_thread(Node* klass) {
3028   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3029   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3030 
3031   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3032                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3033                                      T_ADDRESS, MemNode::unordered);
3034   init_thread = _gvn.transform(init_thread);
3035 
3036   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3037 
3038   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3039   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3040 
3041   { BuildCutout unless(this, tst, PROB_MAX);
3042     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3043   }
3044 }
3045 
3046 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3047   if (ik->is_being_initialized()) {
3048     if (C->needs_clinit_barrier(ik, context)) {
3049       Node* klass = makecon(TypeKlassPtr::make(ik));
3050       guard_klass_being_initialized(klass);
3051       guard_init_thread(klass);
3052       insert_mem_bar(Op_MemBarCPUOrder);
3053     }
3054   } else if (ik->is_initialized()) {
3055     return; // no barrier needed
3056   } else {
3057     uncommon_trap(Deoptimization::Reason_uninitialized,
3058                   Deoptimization::Action_reinterpret,
3059                   nullptr);
3060   }
3061 }
3062 
3063 //------------------------maybe_cast_profiled_receiver-------------------------
3064 // If the profile has seen exactly one type, narrow to exactly that type.
3065 // Subsequent type checks will always fold up.
3066 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3067                                              const TypeKlassPtr* require_klass,
3068                                              ciKlass* spec_klass,
3069                                              bool safe_for_replace) {

2989   if (UncommonNullCast               // Cutout for this technique
2990       && obj != null()               // And not the -Xcomp stupid case?
2991       && !too_many_traps(reason)
2992       ) {
2993     if (speculating) {
2994       return true;
2995     }
2996     if (data == nullptr)
2997       // Edge case:  no mature data.  Be optimistic here.
2998       return true;
2999     // If the profile has not seen a null, assume it won't happen.
3000     assert(java_bc() == Bytecodes::_checkcast ||
3001            java_bc() == Bytecodes::_instanceof ||
3002            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3003     return !data->as_BitData()->null_seen();
3004   }
3005   speculating = false;
3006   return false;
3007 }
3008 
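     // Guard that the klass is fully initialized. The fast path falls through;
     // the slow path either traps to the interpreter (ClassInitBarrierMode == 1)
     // or makes a runtime call to perform initialization (ClassInitBarrierMode == 2).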
3009 void GraphKit::guard_klass_is_initialized(Node* klass) {
3010   assert(ClassInitBarrierMode > 0, "no barriers");
3011   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3012   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3013   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3014                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3015                                     T_BYTE, MemNode::unordered);
3016   init_state = _gvn.transform(init_state);
3017 
3018   Node* initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));
3019 
3020   Node* chk = _gvn.transform(new CmpINode(initialized_state, init_state));
3021   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3022 
3023   switch (ClassInitBarrierMode) {
3024     case 1: { // uncommon trap on slow path
3025       BuildCutout unless(this, tst, PROB_MAX);
3027       // Do not deoptimize this nmethod. Go to the interpreter to initialize the class.
3027       uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3028       break;
3029     }
3030     case 2: { // runtime call on slow path
3031       if (StressClassInitBarriers) {
3032         tst = makecon(TypeInt::ZERO); // always go through slow path
3033       }
3034       IfNode* iff = create_and_xform_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3035 //    IfNode* iff = create_and_map_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3036 
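           // Merge point for the fast path (klass already initialized) and the
           // slow path returning from the runtime call.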
3037       RegionNode* r = new RegionNode(3);
3038       r->init_req(1, _gvn.transform(new IfTrueNode(iff)));
3039 
3040       set_control(_gvn.transform(new IfFalseNode(iff)));
3041 
3042       if (!stopped()) {
3043         kill_dead_locals();
3044 
3045         Node* call = make_runtime_call(RC_NO_LEAF,
3046                                        OptoRuntime::class_init_barrier_Type(),
3047                                        OptoRuntime::class_init_barrier_Java(),
3048                                        nullptr, TypePtr::BOTTOM,
3049                                        klass);
3050         // Deoptimization during class init barrier execution should trigger re-execution of the current bytecode.
3051         call->jvms()->set_should_reexecute(true);
3052 
3053         // FIXME: deoptimize for now. deoptimize=false doesn't work with late inlining yet.
3054         // Parse::create_entry_map() introduces a barrier which uses distinct JVM state (*before* call).
3055         // Compilation fails when distinct exception states are combined.
3056         make_slow_call_ex(call, env()->Throwable_klass(), /*separate_io_proj=*/true, /*deoptimize=*/true);
3057 
3058         Node* fast_io  = call->in(TypeFunc::I_O);
3059         Node* fast_mem = call->in(TypeFunc::Memory);
3060         // These two phis are pre-filled with copies of the fast IO and Memory
3061         Node* io_phi   = PhiNode::make(r, fast_io,  Type::ABIO);
3062         Node* mem_phi  = PhiNode::make(r, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
3063 
3064         r->init_req(2, control());
3065         io_phi->init_req(2, i_o());
3066         mem_phi->init_req(2, reset_memory());
3067 
3068         set_all_memory(_gvn.transform(mem_phi));
3069         set_i_o(_gvn.transform(io_phi));
3070       } else {
3071         r->init_req(2, top());
3072       }
3073       set_control(_gvn.transform(r));
3074       break;
3075     }
3076 
3077     default: fatal("unknown barrier mode: %d", ClassInitBarrierMode);
3078   }
3079   C->set_has_clinit_barriers(true);
3080 }
3081 
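     // Guard that the klass is still in the being_initialized state; once it has
     // left that state, trap with Reason_initialized so the method is reinterpreted.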
3082 void GraphKit::guard_klass_being_initialized(Node* klass) {
3083   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3084   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3085   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3086                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3087                                     T_BYTE, MemNode::acquire);
3088   init_state = _gvn.transform(init_state);
3089 
3090   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3091 
3092   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3093   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3094 
3095   { BuildCutout unless(this, tst, PROB_MAX);
3096     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3097   }
3098 }
3099 
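     // Guard that the current thread is the one running the klass's initialization;
     // trap with Reason_uninitialized otherwise.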
3100 void GraphKit::guard_init_thread(Node* klass) {
3101   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3102   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3103 
3104   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3105                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3106                                      T_ADDRESS, MemNode::unordered);
3107   init_thread = _gvn.transform(init_thread);
3108 
3109   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3110 
3111   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3112   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3113 
3114   { BuildCutout unless(this, tst, PROB_MAX);
3115     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3116   }
3117 }
3118 
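     // Decide which initialization barrier, if any, this use of ik requires:
     // none for an initialized klass, a being-initialized plus init-thread check
     // while its <clinit> is running, and an uncommon trap when it is not yet
     // initialized. Under C->do_clinit_barriers(), emit an explicit
     // initialized-state check instead.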
3119 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3120   if (C->do_clinit_barriers()) {
3121     Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3122     guard_klass_is_initialized(klass);
3123     return;
3124   }
3125   if (ik->is_being_initialized()) {
3126     if (C->needs_clinit_barrier(ik, context)) {
3127       Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3128       guard_klass_being_initialized(klass);
3129       guard_init_thread(klass);
3130       insert_mem_bar(Op_MemBarCPUOrder);
3131     }
3132   } else if (ik->is_initialized()) {
3133     return; // no barrier needed
3134   } else {
3135     uncommon_trap(Deoptimization::Reason_uninitialized,
3136                   Deoptimization::Action_reinterpret,
3137                   nullptr);
3138   }
3139 }
3140 
3141 //------------------------maybe_cast_profiled_receiver-------------------------
3142 // If the profile has seen exactly one type, narrow to exactly that type.
3143 // Subsequent type checks will always fold up.
3144 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3145                                              const TypeKlassPtr* require_klass,
3146                                              ciKlass* spec_klass,
3147                                              bool safe_for_replace) {