 * questions.
 *
 */

#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "memory/resourceArea.hpp"
#include "oops/method.hpp"
#include "opto/addnode.hpp"
#include "opto/c2compiler.hpp"
#include "opto/castnode.hpp"
#include "opto/idealGraphPrinter.hpp"
#include "opto/locknode.hpp"
#include "opto/memnode.hpp"
#include "opto/opaquenode.hpp"
#include "opto/parse.hpp"
#include "opto/rootnode.hpp"
#include "opto/runtime.hpp"
#include "opto/type.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/runtimeUpcalls.hpp"
#include "runtime/safepointMechanism.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/bitMap.inline.hpp"
#include "utilities/copy.hpp"

// Static array so we can figure out which bytecodes stop us from compiling
// the most. Some of the non-static variables are needed in bytecodeInfo.cpp
// and eventually should be encapsulated in a proper class (gri 8/18/98).

#ifndef PRODUCT
uint nodes_created = 0;
uint methods_parsed = 0;
uint methods_seen = 0;
uint blocks_parsed = 0;
uint blocks_seen = 0;

uint explicit_null_checks_inserted = 0;
uint explicit_null_checks_elided = 0;
uint all_null_checks_found = 0;
uint implicit_null_checks = 0;
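// These counters are compiled only in non-product builds; they are assumed to be
// reported by Parse::print_statistics() when parser statistics are requested.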

// ... (lines 62-1116 omitted) ...

  _exits.map()->apply_replaced_nodes(_new_idx);
}

//-----------------------------create_entry_map-------------------------------
// Initialize our parser map to contain the types at method entry.
// For OSR, the map contains a single RawPtr parameter.
// Initial monitor locking for sync. methods is performed by do_method_entry.
SafePointNode* Parse::create_entry_map() {
  // Check for really stupid bail-out cases.
  uint len = TypeFunc::Parms + method()->max_locals() + method()->max_stack();
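  // Note: len covers the TypeFunc::Parms fixed edges (control, i/o, memory,
  // frame pointer, return address) plus one map slot per local and per
  // expression-stack element.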
  if (len >= 32760) {
    // Bailout expected, this is a very rare edge case.
    C->record_method_not_compilable("too many local variables");
    return nullptr;
  }

  // clear current replaced nodes that are of no use from here on (map was cloned in build_exits).
  _caller->map()->delete_replaced_nodes();

  // If this is an inlined method, we may have to do a receiver null check.
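  // For a static callee the receiver check does not apply; instead, when class
  // initialization barriers are requested (C->do_clinit_barriers()), guard that the
  // callee's holder is already initialized. The sp is temporarily bumped over the
  // argument slots so the trap state at the guard still holds the outgoing arguments.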
  if (_caller->has_method() && is_normal_parse()) {
    GraphKit kit(_caller);
    if (!method()->is_static()) {
      kit.null_check_receiver_before_call(method());
    } else if (C->do_clinit_barriers() && C->needs_clinit_barrier(method()->holder(), _caller->method())) {
      ciMethod* declared_method = kit.method()->get_method_at_bci(kit.bci());
      const int nargs = declared_method->arg_size();
      kit.inc_sp(nargs);
      Node* holder = makecon(TypeKlassPtr::make(method()->holder(), Type::trust_interfaces));
      kit.guard_klass_is_initialized(holder);
      kit.dec_sp(nargs);
    }
    _caller = kit.transfer_exceptions_into_jvms();
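    // If the guard can never pass (e.g. the receiver is provably null), the callee
    // is never entered: hand any pending exception states to the exit map and give
    // up on parsing this method.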
    if (kit.stopped()) {
      _exits.add_exception_states_from(_caller);
      _exits.set_jvms(_caller);
      return nullptr;
    }
  }

  assert(method() != nullptr, "parser must have a method");

  // Create an initial safepoint to hold JVM state during parsing
  JVMState* jvms = new (C) JVMState(method(), _caller->has_method() ? _caller : nullptr);
  set_map(new SafePointNode(len, jvms));
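  // The JVMState constructed above records where locals, expression stack and
  // monitors start within this map; the map itself carries the TypeFunc::Parms
  // fixed edges followed by the interpreter-state slots.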

  // Capture receiver info for compiled lambda forms.
  if (method()->is_compiled_lambda_form()) {
    ciInstance* recv_info = _caller->compute_receiver_info(method());
    jvms->set_receiver_info(recv_info);
  }

// ... (lines 1169-1200 omitted) ...

    map()->init_req(i, top());
  }

  SafePointNode* entry_map = stop();
  return entry_map;
}

//-----------------------------do_method_entry--------------------------------
// Emit any code needed in the pseudo-block before BCI zero.
// The main thing to do is lock the receiver of a synchronized method.
void Parse::do_method_entry() {
  set_parse_bci(InvocationEntryBci); // Pseudo-BCP
  set_sp(0);                         // Java Stack Pointer

  NOT_PRODUCT( count_compiled_calls(true/*at_method_entry*/, false/*is_inline*/); )

  if (C->env()->dtrace_method_probes()) {
    make_dtrace_method_entry(method());
  }

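  // Install any runtime-registered method-entry upcalls for this method (see
  // runtime/runtimeUpcalls.hpp); presumably a no-op when nothing is registered for it.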
  install_on_method_entry_runtime_upcalls(method());

#ifdef ASSERT
  // Narrow receiver type when it is too broad for the method being parsed.
  if (!method()->is_static()) {
    ciInstanceKlass* callee_holder = method()->holder();
    const Type* holder_type = TypeInstPtr::make(TypePtr::BotPTR, callee_holder, Type::trust_interfaces);

    Node* receiver_obj = local(0);
    const TypeInstPtr* receiver_type = _gvn.type(receiver_obj)->isa_instptr();

    if (receiver_type != nullptr && !receiver_type->higher_equal(holder_type)) {
      // Receiver should always be a subtype of callee holder.
      // But, since the C2 type system doesn't properly track interfaces,
      // the invariant can't be expressed in the type system for default methods.
      // Example: for unrelated C <: I and D <: I, (C `meet` D) = Object </: I.
      assert(callee_holder->is_interface(), "missing subtype check");

      // Perform dynamic receiver subtype check against callee holder class w/ a halt on failure.
      Node* holder_klass = _gvn.makecon(TypeKlassPtr::make(callee_holder, Type::trust_interfaces));
      Node* not_subtype_ctrl = gen_subtype_check(receiver_obj, holder_klass);
      assert(!stopped(), "not a subtype");

// ... (lines 1243-1588 omitted) ...

      // to produce successors for trapping blocks.
      int trap_index = block()->flow()->trap_index();
      assert(trap_index != 0, "trap index must be valid");
      uncommon_trap(trap_index);
      break;
    }

    NOT_PRODUCT( parse_histogram()->set_initial_state(bc()); );

#ifdef ASSERT
    int pre_bc_sp = sp();
    int inputs, depth;
    bool have_se = !stopped() && compute_stack_effects(inputs, depth);
    assert(!have_se || pre_bc_sp >= inputs, "have enough stack to execute this BC: pre_bc_sp=%d, inputs=%d", pre_bc_sp, inputs);
#endif //ASSERT

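    // Parse the bytecode itself: do_one_bytecode() dispatches on bc() and may push
    // or pop expression-stack values, add control flow, or record a compile failure.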
    do_one_bytecode();
    if (failing()) return;

    assert(!have_se || stopped() || failing() || (sp() - pre_bc_sp) == depth,
           "incorrect depth prediction: bc=%s bci=%d, sp=%d, pre_bc_sp=%d, depth=%d", Bytecodes::name(bc()), bci(), sp(), pre_bc_sp, depth);

    do_exceptions();

    NOT_PRODUCT( parse_histogram()->record_change(); );

    if (log != nullptr)
      log->clear_context(); // skip marker if nothing was printed

    // Fall into next bytecode. Each bytecode normally has 1 sequential
    // successor which is typically made ready by visiting this bytecode.
    // If the successor has several predecessors, then it is a merge
    // point, starts a new basic block, and is handled like other basic blocks.
  }
}


//--------------------------set_parse_bci---------------------------------------
void Parse::set_parse_bci(int bci) {
  set_bci(bci);
  Node_Notes* nn = C->default_node_notes();

// ... (lines 1630-2170 omitted) ...

    Node* fast_io  = call->in(TypeFunc::I_O);
    Node* fast_mem = call->in(TypeFunc::Memory);
    // These two phis are pre-filled with copies of the fast IO and Memory
    Node* io_phi  = PhiNode::make(result_rgn, fast_io,  Type::ABIO);
    Node* mem_phi = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
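    // result_rgn's input 1 is the fast path that skips the call (wired up where the
    // region was created); input 2, filled in just below, merges in the slow path,
    // i.e. the control, i/o and memory state coming out of the runtime call.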

    result_rgn->init_req(2, control());
    io_phi    ->init_req(2, i_o());
    mem_phi   ->init_req(2, reset_memory());

    set_all_memory( _gvn.transform(mem_phi) );
    set_i_o( _gvn.transform(io_phi) );
  }

  set_control( _gvn.transform(result_rgn) );
}

// Add check to deoptimize once holder klass is fully initialized.
void Parse::clinit_deopt() {
  if (method()->holder()->is_initialized()) {
    return; // in case do_clinit_barriers() is true
  }
  assert(C->has_method(), "only for normal compilations");
  assert(depth() == 1, "only for main compiled method");
  assert(is_normal_parse(), "no barrier needed on osr entry");
  assert(!method()->holder()->is_not_initialized(), "initialization should have been started");

  set_parse_bci(0);

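  // The guard loads the holder's init_state and uncommon-traps (re-entering the
  // interpreter) on any entry where the class is no longer marked as being initialized.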
  Node* holder = makecon(TypeKlassPtr::make(method()->holder(), Type::trust_interfaces));
  guard_klass_being_initialized(holder);
}

//------------------------------return_current---------------------------------
// Append current _map to _exit_return
void Parse::return_current(Node* value) {
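  // For Object.<init>, the receiver may need to be registered for finalization:
  // call_register_finalizer() checks whether the receiver's klass has a finalizer
  // and, if so, calls into the runtime to register the object before returning.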
  if (method()->intrinsic_id() == vmIntrinsics::_Object_init) {
    call_register_finalizer();
  }

  // Do not set_parse_bci, so that return goo is credited to the return insn.
  set_bci(InvocationEntryBci);