src/hotspot/share/opto/graphKit.cpp

2990   if (UncommonNullCast               // Cutout for this technique
2991       && obj != null()               // And not the -Xcomp stupid case?
2992       && !too_many_traps(reason)
2993       ) {
2994     if (speculating) {
2995       return true;
2996     }
2997     if (data == nullptr)
2998       // Edge case:  no mature data.  Be optimistic here.
2999       return true;
3000     // If the profile has not seen a null, assume it won't happen.
3001     assert(java_bc() == Bytecodes::_checkcast ||
3002            java_bc() == Bytecodes::_instanceof ||
3003            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3004     return !data->as_BitData()->null_seen();
3005   }
3006   speculating = false;
3007   return false;
3008 }
3009 
3010 void GraphKit::guard_klass_being_initialized(Node* klass) {
3011   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3012   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3013   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3014                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3015                                     T_BYTE, MemNode::acquire);
3016   init_state = _gvn.transform(init_state);
3017 
3018   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3019 
3020   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3021   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3022 
3023   { BuildCutout unless(this, tst, PROB_MAX);
3024     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3025   }
3026 }
3027 
3028 void GraphKit::guard_init_thread(Node* klass) {
3029   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3030   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3031 
3032   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3033                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3034                                      T_ADDRESS, MemNode::unordered);
3035   init_thread = _gvn.transform(init_thread);
3036 
3037   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3038 
3039   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3040   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3041 
3042   { BuildCutout unless(this, tst, PROB_MAX);
3043     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3044   }
3045 }
3046 
3047 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3048   if (ik->is_being_initialized()) {
3049     if (C->needs_clinit_barrier(ik, context)) {
3050       Node* klass = makecon(TypeKlassPtr::make(ik));
3051       guard_klass_being_initialized(klass);
3052       guard_init_thread(klass);
3053       insert_mem_bar(Op_MemBarCPUOrder);
3054     }
3055   } else if (ik->is_initialized()) {
3056     return; // no barrier needed
3057   } else {
3058     uncommon_trap(Deoptimization::Reason_uninitialized,
3059                   Deoptimization::Action_reinterpret,
3060                   nullptr);
3061   }
3062 }
3063 
3064 //------------------------maybe_cast_profiled_receiver-------------------------
3065 // If the profile has seen exactly one type, narrow to exactly that type.
3066 // Subsequent type checks will always fold up.
3067 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3068                                              const TypeKlassPtr* require_klass,
3069                                              ciKlass* spec_klass,
3070                                              bool safe_for_replace) {

2990   if (UncommonNullCast               // Cutout for this technique
2991       && obj != null()               // And not the -Xcomp stupid case?
2992       && !too_many_traps(reason)
2993       ) {
2994     if (speculating) {
2995       return true;
2996     }
2997     if (data == nullptr)
2998       // Edge case:  no mature data.  Be optimistic here.
2999       return true;
3000     // If the profile has not seen a null, assume it won't happen.
3001     assert(java_bc() == Bytecodes::_checkcast ||
3002            java_bc() == Bytecodes::_instanceof ||
3003            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3004     return !data->as_BitData()->null_seen();
3005   }
3006   speculating = false;
3007   return false;
3008 }
3009 
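       // Emits a guard that 'klass' has reached the fully_initialized state,
       // diverting to a slow path (uncommon trap or runtime call, depending
       // on ClassInitBarrierMode) when it has not.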
3010 void GraphKit::guard_klass_is_initialized(Node* klass) {
3011   assert(ClassInitBarrierMode > 0, "no barriers");
3012   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3013   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3014   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3015                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3016                                     T_BYTE, MemNode::unordered);
3017   init_state = _gvn.transform(init_state);
3018 
3019   Node* initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));
3020 
3021   Node* chk = _gvn.transform(new CmpINode(initialized_state, init_state));
3022   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3023 
3024   switch (ClassInitBarrierMode) {
3025     case 1: { // uncommon trap on slow path
3026       BuildCutout unless(this, tst, PROB_MAX);
3027       // Do not deoptimize this nmethod. Go to Interpreter to initialize class.
3028       uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3029       break;
3030     }
3031     case 2: { // runtime call on slow path
3032       if (StressClassInitBarriers) {
3033         tst = makecon(TypeInt::ZERO); // always go through slow path
3034       }
3035       IfNode* iff = create_and_xform_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3036 //    IfNode* iff = create_and_map_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3037 
3038       RegionNode* r = new RegionNode(3);
3039       r->init_req(1, _gvn.transform(new IfTrueNode(iff)));
3040 
3041       set_control(_gvn.transform(new IfFalseNode(iff)));
3042 
3043       if (!stopped()) {
3044         kill_dead_locals();
3045 
3046         Node* call = make_runtime_call(RC_NO_LEAF,
3047                                        OptoRuntime::class_init_barrier_Type(),
3048                                        OptoRuntime::class_init_barrier_Java(),
3049                                        nullptr, TypePtr::BOTTOM,
3050                                        klass);
3051         // Deoptimization during class init barrier execution should trigger current bytecode reexecution.
3052         call->jvms()->set_should_reexecute(true);
3053 
3054         // FIXME: deoptimize for now. deoptimize=false doesn't work with late inlining yet.
3055         // Parse::create_entry_map() introduces a barrier which uses distinct JVM state (*before* call).
3056         // Compilation fails when distinct exception states are combined.
3057         make_slow_call_ex(call, env()->Throwable_klass(), /*separate_io_proj=*/true, /*deoptimize=*/true);
3058 
3059         Node* fast_io  = call->in(TypeFunc::I_O);
3060         Node* fast_mem = call->in(TypeFunc::Memory);
3061         // These two phis are pre-filled with copies of the fast IO and Memory
3062         Node* io_phi   = PhiNode::make(r, fast_io,  Type::ABIO);
3063         Node* mem_phi  = PhiNode::make(r, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
3064 
3065         r->init_req(2, control());
3066         io_phi->init_req(2, i_o());
3067         mem_phi->init_req(2, reset_memory());
3068 
3069         set_all_memory(_gvn.transform(mem_phi));
3070         set_i_o(_gvn.transform(io_phi));
3071       } else {
3072         r->init_req(2, top());
3073       }
3074       set_control(_gvn.transform(r));
3075       break;
3076     }
3077 
3078     default: fatal("unknown barrier mode: %d", ClassInitBarrierMode);
3079   }
3080   C->set_has_clinit_barriers(true);
3081 }
3082 
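       // Guards that 'klass' is still being initialized; if the state has
       // already advanced, trap with Reason_initialized / Action_reinterpret
       // so execution continues without this barrier.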
3083 void GraphKit::guard_klass_being_initialized(Node* klass) {
3084   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3085   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3086   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3087                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3088                                     T_BYTE, MemNode::acquire);
3089   init_state = _gvn.transform(init_state);
3090 
3091   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3092 
3093   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3094   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3095 
3096   { BuildCutout unless(this, tst, PROB_MAX);
3097     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3098   }
3099 }
3100 
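       // Guards that the current thread is the one running the class
       // initializer; any other thread traps (Reason_uninitialized,
       // Action_none) and completes the access in the interpreter.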
3101 void GraphKit::guard_init_thread(Node* klass) {
3102   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3103   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3104 
3105   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3106                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3107                                      T_ADDRESS, MemNode::unordered);
3108   init_thread = _gvn.transform(init_thread);
3109 
3110   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3111 
3112   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3113   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3114 
3115   { BuildCutout unless(this, tst, PROB_MAX);
3116     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3117   }
3118 }
3119 
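       // Decides which guards to emit from the class's current init state.
       // When the compilation requests unconditional barriers
       // (C->do_clinit_barriers()), a fully-initialized check is emitted
       // regardless of the observed state.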
3120 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3121   if (C->do_clinit_barriers()) {
3122     Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3123     guard_klass_is_initialized(klass);
3124     return;
3125   }
3126   if (ik->is_being_initialized()) {
3127     if (C->needs_clinit_barrier(ik, context)) {
3128       Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3129       guard_klass_being_initialized(klass);
3130       guard_init_thread(klass);
3131       insert_mem_bar(Op_MemBarCPUOrder);
3132     }
3133   } else if (ik->is_initialized()) {
3134     return; // no barrier needed
3135   } else {
3136     uncommon_trap(Deoptimization::Reason_uninitialized,
3137                   Deoptimization::Action_reinterpret,
3138                   nullptr);
3139   }
3140 }
3141 
3142 //------------------------maybe_cast_profiled_receiver-------------------------
3143 // If the profile has seen exactly one type, narrow to exactly that type.
3144 // Subsequent type checks will always fold up.
3145 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3146                                              const TypeKlassPtr* require_klass,
3147                                              ciKlass* spec_klass,
3148                                              bool safe_for_replace) {