
src/hotspot/share/opto/graphKit.cpp

2121   case Deoptimization::Action_make_not_entrant:
2122     C->set_trap_can_recompile(true);
2123     break;
2124   case Deoptimization::Action_none:
2125   case Deoptimization::Action_make_not_compilable:
2126     break;
2127   default:
2128 #ifdef ASSERT
2129     fatal("unknown action %d: %s", action, Deoptimization::trap_action_name(action));
2130 #endif
2131     break;
2132   }
2133 
2134   if (TraceOptoParse) {
2135     char buf[100];
2136     tty->print_cr("Uncommon trap %s at bci:%d",
2137                   Deoptimization::format_trap_request(buf, sizeof(buf),
2138                                                       trap_request), bci());
2139   }
2140 
2141   CompileLog* log = C->log();
2142   if (log != nullptr) {
2143     int kid = (klass == nullptr)? -1: log->identify(klass);
2144     log->begin_elem("uncommon_trap bci='%d'", bci());
2145     char buf[100];
2146     log->print(" %s", Deoptimization::format_trap_request(buf, sizeof(buf),
2147                                                           trap_request));
2148     if (kid >= 0)         log->print(" klass='%d'", kid);
2149     if (comment != nullptr)  log->print(" comment='%s'", comment);
2150     log->end_elem();
2151   }
2152 
2153   // Make sure any guarding test views this path as very unlikely
2154   Node *i0 = control()->in(0);
2155   if (i0 != nullptr && i0->is_If()) {        // Found a guarding if test?
2156     IfNode *iff = i0->as_If();
2157     float f = iff->_prob;   // Get prob
2158     if (control()->Opcode() == Op_IfTrue) {
2159       if (f > PROB_UNLIKELY_MAG(4))
2160         iff->_prob = PROB_MIN;

2958   if (UncommonNullCast               // Cutout for this technique
2959       && obj != null()               // And not the -Xcomp stupid case?
2960       && !too_many_traps(reason)
2961       ) {
2962     if (speculating) {
2963       return true;
2964     }
2965     if (data == nullptr)
2966       // Edge case:  no mature data.  Be optimistic here.
2967       return true;
2968     // If the profile has not seen a null, assume it won't happen.
2969     assert(java_bc() == Bytecodes::_checkcast ||
2970            java_bc() == Bytecodes::_instanceof ||
2971            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2972     return !data->as_BitData()->null_seen();
2973   }
2974   speculating = false;
2975   return false;
2976 }
2977 
2978 void GraphKit::guard_klass_being_initialized(Node* klass) {
2979   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
2980   Node* adr = basic_plus_adr(top(), klass, init_state_off);
2981   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
2982                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
2983                                     T_BYTE, MemNode::acquire);
2984   init_state = _gvn.transform(init_state);
2985 
2986   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
2987 
2988   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
2989   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
2990 
2991   { BuildCutout unless(this, tst, PROB_MAX);
2992     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
2993   }
2994 }
2995 
2996 void GraphKit::guard_init_thread(Node* klass) {
2997   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
2998   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
2999 
3000   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3001                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3002                                      T_ADDRESS, MemNode::unordered);
3003   init_thread = _gvn.transform(init_thread);
3004 
3005   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3006 
3007   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3008   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3009 
3010   { BuildCutout unless(this, tst, PROB_MAX);
3011     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3012   }
3013 }
3014 
3015 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3016   if (ik->is_being_initialized()) {
3017     if (C->needs_clinit_barrier(ik, context)) {
3018       Node* klass = makecon(TypeKlassPtr::make(ik));
3019       guard_klass_being_initialized(klass);
3020       guard_init_thread(klass);
3021       insert_mem_bar(Op_MemBarCPUOrder);
3022     }
3023   } else if (ik->is_initialized()) {
3024     return; // no barrier needed
3025   } else {
3026     uncommon_trap(Deoptimization::Reason_uninitialized,
3027                   Deoptimization::Action_reinterpret,
3028                   nullptr);
3029   }
3030 }
3031 
3032 //------------------------maybe_cast_profiled_receiver-------------------------
3033 // If the profile has seen exactly one type, narrow to exactly that type.
3034 // Subsequent type checks will always fold up.
3035 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3036                                              const TypeKlassPtr* require_klass,
3037                                              ciKlass* spec_klass,
3038                                              bool safe_for_replace) {

2121   case Deoptimization::Action_make_not_entrant:
2122     C->set_trap_can_recompile(true);
2123     break;
2124   case Deoptimization::Action_none:
2125   case Deoptimization::Action_make_not_compilable:
2126     break;
2127   default:
2128 #ifdef ASSERT
2129     fatal("unknown action %d: %s", action, Deoptimization::trap_action_name(action));
2130 #endif
2131     break;
2132   }
2133 
2134   if (TraceOptoParse) {
2135     char buf[100];
2136     tty->print_cr("Uncommon trap %s at bci:%d",
2137                   Deoptimization::format_trap_request(buf, sizeof(buf),
2138                                                       trap_request), bci());
2139   }
2140 
2141   if (PreloadReduceTraps && Compile::current()->for_preload() &&
2142       (action != Deoptimization::Action_none)) {
2143     ResourceMark rm;
2144     ciMethod* cim = Compile::current()->method();
2145     log_debug(scc,deoptimization)("Uncommon trap in preload code: reason=%s action=%s method=%s::%s bci=%d, %s",
2146                   Deoptimization::trap_reason_name(reason), Deoptimization::trap_action_name(action),
2147                   cim->holder()->name()->as_klass_external_name(), cim->name()->as_klass_external_name(),
2148                   bci(), comment);
2149   }
2150 
2151   CompileLog* log = C->log();
2152   if (log != nullptr) {
2153     int kid = (klass == nullptr)? -1: log->identify(klass);
2154     log->begin_elem("uncommon_trap bci='%d'", bci());
2155     char buf[100];
2156     log->print(" %s", Deoptimization::format_trap_request(buf, sizeof(buf),
2157                                                           trap_request));
2158     if (kid >= 0)         log->print(" klass='%d'", kid);
2159     if (comment != nullptr)  log->print(" comment='%s'", comment);
2160     log->end_elem();
2161   }
2162 
2163   // Make sure any guarding test views this path as very unlikely
2164   Node *i0 = control()->in(0);
2165   if (i0 != nullptr && i0->is_If()) {        // Found a guarding if test?
2166     IfNode *iff = i0->as_If();
2167     float f = iff->_prob;   // Get prob
2168     if (control()->Opcode() == Op_IfTrue) {
2169       if (f > PROB_UNLIKELY_MAG(4))
2170         iff->_prob = PROB_MIN;

2968   if (UncommonNullCast               // Cutout for this technique
2969       && obj != null()               // And not the -Xcomp stupid case?
2970       && !too_many_traps(reason)
2971       ) {
2972     if (speculating) {
2973       return true;
2974     }
2975     if (data == nullptr)
2976       // Edge case:  no mature data.  Be optimistic here.
2977       return true;
2978     // If the profile has not seen a null, assume it won't happen.
2979     assert(java_bc() == Bytecodes::_checkcast ||
2980            java_bc() == Bytecodes::_instanceof ||
2981            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2982     return !data->as_BitData()->null_seen();
2983   }
2984   speculating = false;
2985   return false;
2986 }
2987 
2988 void GraphKit::guard_klass_is_initialized(Node* klass) {
2989   assert(ClassInitBarrierMode > 0, "no barriers");
2990   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
2991   Node* adr = basic_plus_adr(top(), klass, init_state_off);
2992   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
2993                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
2994                                     T_BYTE, MemNode::unordered);
2995   init_state = _gvn.transform(init_state);
2996 
2997   Node* initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));
2998 
2999   Node* chk = _gvn.transform(new CmpINode(initialized_state, init_state));
3000   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3001 
3002   switch (ClassInitBarrierMode) {
3003     case 1: { // uncommon trap on slow path
3004       BuildCutout unless(this, tst, PROB_MAX);
3005       // Do not deoptimize this nmethod. Go to Interpreter to initialize class.
3006       uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3007       break;
3008     }
3009     case 2: { // runtime call on slow path
3010       if (StressClassInitBarriers) {
3011         tst = makecon(TypeInt::ZERO); // always go through slow path
3012       }
3013       IfNode* iff = create_and_xform_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3014 //    IfNode* iff = create_and_map_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3015 
3016       RegionNode* r = new RegionNode(3);
3017       r->init_req(1, _gvn.transform(new IfTrueNode(iff)));
3018 
3019       set_control(_gvn.transform(new IfFalseNode(iff)));
3020 
3021       if (!stopped()) {
3022         kill_dead_locals();
3023 
3024         Node* call = make_runtime_call(RC_NO_LEAF,
3025                                        OptoRuntime::class_init_barrier_Type(),
3026                                        OptoRuntime::class_init_barrier_Java(),
3027                                        nullptr, TypePtr::BOTTOM,
3028                                        klass);
3029         // Deoptimization during class init barrier execution should trigger current bytecode reexecution.
3030         call->jvms()->set_should_reexecute(true);
3031 
3032         // FIXME: deoptimize for now. deoptimize=false doesn't work with late inlining yet.
3033         // Parse::create_entry_map() introduces a barrier which uses distinct JVM state (*before* call).
3034         // Compilation fails when distinct exception states are combined.
3035         make_slow_call_ex(call, env()->Throwable_klass(), /*separate_io_proj=*/true, /*deoptimize=*/true);
3036 
3037         Node* fast_io  = call->in(TypeFunc::I_O);
3038         Node* fast_mem = call->in(TypeFunc::Memory);
3039         // These two phis are pre-filled with copies of the fast IO and Memory
3040         Node* io_phi   = PhiNode::make(r, fast_io,  Type::ABIO);
3041         Node* mem_phi  = PhiNode::make(r, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
3042 
3043         r->init_req(2, control());
3044         io_phi->init_req(2, i_o());
3045         mem_phi->init_req(2, reset_memory());
3046 
3047         set_all_memory(_gvn.transform(mem_phi));
3048         set_i_o(_gvn.transform(io_phi));
3049       } else {
3050         r->init_req(2, top());
3051       }
3052       set_control(_gvn.transform(r));
3053       break;
3054     }
3055 
3056     default: fatal("unknown barrier mode: %d", ClassInitBarrierMode);
3057   }
3058   C->set_has_clinit_barriers(true);
3059 }
3060 
3061 void GraphKit::guard_klass_being_initialized(Node* klass) {
3062   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3063   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3064   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3065                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3066                                     T_BYTE, MemNode::acquire);
3067   init_state = _gvn.transform(init_state);
3068 
3069   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3070 
3071   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3072   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3073 
3074   { BuildCutout unless(this, tst, PROB_MAX);
3075     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3076   }
3077 }
3078 
3079 void GraphKit::guard_init_thread(Node* klass) {
3080   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3081   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3082 
3083   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3084                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3085                                      T_ADDRESS, MemNode::unordered);
3086   init_thread = _gvn.transform(init_thread);
3087 
3088   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3089 
3090   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3091   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3092 
3093   { BuildCutout unless(this, tst, PROB_MAX);
3094     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3095   }
3096 }
3097 
3098 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3099   if (C->do_clinit_barriers()) {
3100     Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3101     guard_klass_is_initialized(klass);
3102     return;
3103   }
3104   if (ik->is_being_initialized()) {
3105     if (C->needs_clinit_barrier(ik, context)) {
3106       Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3107       guard_klass_being_initialized(klass);
3108       guard_init_thread(klass);
3109       insert_mem_bar(Op_MemBarCPUOrder);
3110     }
3111   } else if (ik->is_initialized()) {
3112     return; // no barrier needed
3113   } else {
3114     uncommon_trap(Deoptimization::Reason_uninitialized,
3115                   Deoptimization::Action_reinterpret,
3116                   nullptr);
3117   }
3118 }
3119 
3120 //------------------------maybe_cast_profiled_receiver-------------------------
3121 // If the profile has seen exactly one type, narrow to exactly that type.
3122 // Subsequent type checks will always fold up.
3123 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3124                                              const TypeKlassPtr* require_klass,
3125                                              ciKlass* spec_klass,
3126                                              bool safe_for_replace) {