< prev index next >

src/hotspot/share/opto/graphKit.cpp

Print this page

2992   if (UncommonNullCast               // Cutout for this technique
2993       && obj != null()               // And not the -Xcomp stupid case?
2994       && !too_many_traps(reason)
2995       ) {
2996     if (speculating) {
2997       return true;
2998     }
2999     if (data == nullptr)
3000       // Edge case:  no mature data.  Be optimistic here.
3001       return true;
3002     // If the profile has not seen a null, assume it won't happen.
3003     assert(java_bc() == Bytecodes::_checkcast ||
3004            java_bc() == Bytecodes::_instanceof ||
3005            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3006     return !data->as_BitData()->null_seen();
3007   }
3008   speculating = false;
3009   return false;
3010 }
3011 







































































3012 void GraphKit::guard_klass_being_initialized(Node* klass) {
3013   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3014   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3015   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3016                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3017                                     T_BYTE, MemNode::unordered);
3018   init_state = _gvn.transform(init_state);
3019 
3020   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3021 
3022   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3023   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3024 
3025   { BuildCutout unless(this, tst, PROB_MAX);
3026     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3027   }
3028 }
3029 
3030 void GraphKit::guard_init_thread(Node* klass) {
3031   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3032   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3033 
3034   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3035                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3036                                      T_ADDRESS, MemNode::unordered);
3037   init_thread = _gvn.transform(init_thread);
3038 
3039   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3040 
3041   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3042   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3043 
3044   { BuildCutout unless(this, tst, PROB_MAX);
3045     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3046   }
3047 }
3048 
3049 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {





3050   if (ik->is_being_initialized()) {
3051     if (C->needs_clinit_barrier(ik, context)) {
3052       Node* klass = makecon(TypeKlassPtr::make(ik));
3053       guard_klass_being_initialized(klass);
3054       guard_init_thread(klass);
3055       insert_mem_bar(Op_MemBarCPUOrder);
3056     }
3057   } else if (ik->is_initialized()) {
3058     return; // no barrier needed
3059   } else {
3060     uncommon_trap(Deoptimization::Reason_uninitialized,
3061                   Deoptimization::Action_reinterpret,
3062                   nullptr);
3063   }
3064 }
3065 
3066 //------------------------maybe_cast_profiled_receiver-------------------------
3067 // If the profile has seen exactly one type, narrow to exactly that type.
3068 // Subsequent type checks will always fold up.
3069 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3070                                              const TypeKlassPtr* require_klass,
3071                                              ciKlass* spec_klass,
3072                                              bool safe_for_replace) {

2992   if (UncommonNullCast               // Cutout for this technique
2993       && obj != null()               // And not the -Xcomp stupid case?
2994       && !too_many_traps(reason)
2995       ) {
2996     if (speculating) {
2997       return true;
2998     }
2999     if (data == nullptr)
3000       // Edge case:  no mature data.  Be optimistic here.
3001       return true;
3002     // If the profile has not seen a null, assume it won't happen.
3003     assert(java_bc() == Bytecodes::_checkcast ||
3004            java_bc() == Bytecodes::_instanceof ||
3005            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3006     return !data->as_BitData()->null_seen();
3007   }
3008   speculating = false;
3009   return false;
3010 }
3011 
// Emit a guard that the given klass is fully initialized. The slow path is
// selected by ClassInitBarrierMode:
//   1 - uncommon trap: do not deoptimize this nmethod; the class gets
//       initialized in the interpreter;
//   2 - runtime call to the class-initialization barrier stub.
// Records on the compilation that clinit barriers were emitted.
void GraphKit::guard_klass_is_initialized(Node* klass) {
  assert(ClassInitBarrierMode > 0, "no barriers");
  // Load InstanceKlass::_init_state (a byte) from the klass pointer.
  int init_state_off = in_bytes(InstanceKlass::init_state_offset());
  Node* adr = basic_plus_adr(top(), klass, init_state_off);
  Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
                                    adr->bottom_type()->is_ptr(), TypeInt::BYTE,
                                    T_BYTE, MemNode::unordered);
  init_state = _gvn.transform(init_state);

  // Expected state constant: fully_initialized.
  Node* initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));

  // tst is true on the fast path (state == fully_initialized).
  Node* chk = _gvn.transform(new CmpINode(initialized_state, init_state));
  Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));

  switch (ClassInitBarrierMode) {
    case 1: { // uncommon trap on slow path
      BuildCutout unless(this, tst, PROB_MAX);
      // Do not deoptimize this nmethod. Go to Interpreter to initialize class.
      uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
      break;
    }
    case 2: { // runtime call on slow path
      if (StressClassInitBarriers) {
        tst = makecon(TypeInt::ZERO); // always go through slow path
      }
      IfNode* iff = create_and_xform_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
//    IfNode* iff = create_and_map_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);

      // Region merging the fast path (req 1) and the slow-call path (req 2).
      RegionNode* r = new RegionNode(3);
      r->init_req(1, _gvn.transform(new IfTrueNode(iff)));

      // Continue building on the slow (not-initialized) path.
      set_control(_gvn.transform(new IfFalseNode(iff)));

      if (!stopped()) {
        kill_dead_locals();

        Node* call = make_runtime_call(RC_NO_LEAF,
                                       OptoRuntime::class_init_barrier_Type(),
                                       OptoRuntime::class_init_barrier_Java(),
                                       nullptr, TypePtr::BOTTOM,
                                       klass);

        // FIXME: deoptimize for now. deoptimize=false doesn't work with late inlining yet.
        // Parse::create_entry_map() introduces a barrier which uses distinct JVM state (*before* call).
        // Compilation fails when distinct exception states are combined.
        make_slow_call_ex(call, env()->Throwable_klass(), /*separate_io_proj=*/true, /*deoptimize=*/true);

        // Merge I/O and memory: the call's incoming state represents the
        // fast path; the post-call state joins it through the phis below.
        Node* fast_io  = call->in(TypeFunc::I_O);
        Node* fast_mem = call->in(TypeFunc::Memory);
        // These two phis are pre-filled with copies of the fast IO and Memory
        Node* io_phi   = PhiNode::make(r, fast_io,  Type::ABIO);
        Node* mem_phi  = PhiNode::make(r, fast_mem, Type::MEMORY, TypePtr::BOTTOM);

        r->init_req(2, control());
        io_phi->init_req(2, i_o());
        mem_phi->init_req(2, reset_memory());

        set_all_memory(_gvn.transform(mem_phi));
        set_i_o(_gvn.transform(io_phi));
      } else {
        // Slow path is unreachable; plug a dead input into the region.
        r->init_req(2, top());
      }
      set_control(_gvn.transform(r));
      break;
    }

    default: fatal("unknown barrier mode: %d", ClassInitBarrierMode);
  }
  C->set_has_clinit_barriers(true);
}
3082 
3083 void GraphKit::guard_klass_being_initialized(Node* klass) {
3084   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3085   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3086   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3087                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3088                                     T_BYTE, MemNode::unordered);
3089   init_state = _gvn.transform(init_state);
3090 
3091   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3092 
3093   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3094   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3095 
3096   { BuildCutout unless(this, tst, PROB_MAX);
3097     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3098   }
3099 }
3100 
3101 void GraphKit::guard_init_thread(Node* klass) {
3102   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3103   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3104 
3105   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3106                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3107                                      T_ADDRESS, MemNode::unordered);
3108   init_thread = _gvn.transform(init_thread);
3109 
3110   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3111 
3112   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3113   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3114 
3115   { BuildCutout unless(this, tst, PROB_MAX);
3116     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3117   }
3118 }
3119 
3120 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3121   if (C->do_clinit_barriers()) {
3122     Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3123     guard_klass_is_initialized(klass);
3124     return;
3125   }
3126   if (ik->is_being_initialized()) {
3127     if (C->needs_clinit_barrier(ik, context)) {
3128       Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3129       guard_klass_being_initialized(klass);
3130       guard_init_thread(klass);
3131       insert_mem_bar(Op_MemBarCPUOrder);
3132     }
3133   } else if (ik->is_initialized()) {
3134     return; // no barrier needed
3135   } else {
3136     uncommon_trap(Deoptimization::Reason_uninitialized,
3137                   Deoptimization::Action_reinterpret,
3138                   nullptr);
3139   }
3140 }
3141 
3142 //------------------------maybe_cast_profiled_receiver-------------------------
3143 // If the profile has seen exactly one type, narrow to exactly that type.
3144 // Subsequent type checks will always fold up.
3145 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3146                                              const TypeKlassPtr* require_klass,
3147                                              ciKlass* spec_klass,
3148                                              bool safe_for_replace) {
< prev index next >