 * questions.
 *
 */

#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "memory/resourceArea.hpp"
#include "oops/method.hpp"
#include "opto/addnode.hpp"
#include "opto/c2compiler.hpp"
#include "opto/castnode.hpp"
#include "opto/idealGraphPrinter.hpp"
#include "opto/locknode.hpp"
#include "opto/memnode.hpp"
#include "opto/opaquenode.hpp"
#include "opto/parse.hpp"
#include "opto/rootnode.hpp"
#include "opto/runtime.hpp"
#include "opto/type.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/runtimeUpcalls.hpp"
#include "runtime/safepointMechanism.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/bitMap.inline.hpp"
#include "utilities/copy.hpp"

// Static array so we can figure out which bytecodes stop us from compiling
// the most. Some of the non-static variables are needed in bytecodeInfo.cpp
// and eventually should be encapsulated in a proper class (gri 8/18/98).
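// In non-product builds these totals are reported by Parse::print_statistics().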

#ifndef PRODUCT
uint nodes_created             = 0;
uint methods_parsed            = 0;
uint methods_seen              = 0;
uint blocks_parsed             = 0;
uint blocks_seen               = 0;

uint explicit_null_checks_inserted = 0;
uint explicit_null_checks_elided   = 0;
uint all_null_checks_found         = 0;
uint implicit_null_checks          = 0;
// ...
  _exits.map()->apply_replaced_nodes(_new_idx);
}

//-----------------------------create_entry_map-------------------------------
// Initialize our parser map to contain the types at method entry.
// For OSR, the map contains a single RawPtr parameter.
// Initial monitor locking for sync. methods is performed by do_method_entry.
SafePointNode* Parse::create_entry_map() {
  // Check for really stupid bail-out cases.
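  // The entry map needs an edge for each fixed TypeFunc::Parms input plus one
  // per local and per stack slot, so reject absurdly large frames up front.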
  uint len = TypeFunc::Parms + method()->max_locals() + method()->max_stack();
  if (len >= 32760) {
    // Bailout expected, this is a very rare edge case.
    C->record_method_not_compilable("too many local variables");
    return nullptr;
  }

  // clear current replaced nodes that are of no use from here on (map was cloned in build_exits).
  _caller->map()->delete_replaced_nodes();

  // If this is an inlined method, we may have to do a receiver null check.
  if (_caller->has_method() && is_normal_parse()) {
    GraphKit kit(_caller);
    if (!method()->is_static()) {
      kit.null_check_receiver_before_call(method());
    } else if (C->do_clinit_barriers() && C->needs_clinit_barrier(method()->holder(), _caller->method())) {
      ciMethod* declared_method = kit.method()->get_method_at_bci(kit.bci());
      const int nargs = declared_method->arg_size();
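      // Bump sp past the outgoing arguments so the JVM state captured by the
      // guard re-executes the call, arguments included, if we deoptimize here.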
      kit.inc_sp(nargs);
      Node* holder = makecon(TypeKlassPtr::make(method()->holder(), Type::trust_interfaces));
      kit.guard_klass_is_initialized(holder);
      kit.dec_sp(nargs);
    }
    _caller = kit.transfer_exceptions_into_jvms();
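    // If the null check (or initialization guard) always fails, the inlined
    // body is unreachable: hand the exception states to the exit map and give up.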
    if (kit.stopped()) {
      _exits.add_exception_states_from(_caller);
      _exits.set_jvms(_caller);
      return nullptr;
    }
  }

  assert(method() != nullptr, "parser must have a method");

  // Create an initial safepoint to hold JVM state during parsing
  JVMState* jvms = new (C) JVMState(method(), _caller->has_method() ? _caller : nullptr);
  set_map(new SafePointNode(len, jvms));
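  // The fresh map has len inputs: the fixed TypeFunc::Parms slots plus one per
  // local and stack slot; they are populated below.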

  // Capture receiver info for compiled lambda forms.
  if (method()->is_compiled_lambda_form()) {
    ciInstance* recv_info = _caller->compute_receiver_info(method());
    jvms->set_receiver_info(recv_info);
  }

// ...
    map()->init_req(i, top());
  }

  SafePointNode* entry_map = stop();
  return entry_map;
}

//-----------------------------do_method_entry--------------------------------
// Emit any code needed in the pseudo-block before BCI zero.
// The main thing to do is lock the receiver of a synchronized method.
void Parse::do_method_entry() {
  set_parse_bci(InvocationEntryBci); // Pseudo-BCP
  set_sp(0);                         // Java Stack Pointer

  NOT_PRODUCT( count_compiled_calls(true/*at_method_entry*/, false/*is_inline*/); )

  if (C->env()->dtrace_method_probes()) {
    make_dtrace_method_entry(method());
  }

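  // Give the runtime a chance to install any method-entry upcalls registered
  // for this method (see runtime/runtimeUpcalls.hpp).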
  install_on_method_entry_runtime_upcalls(method());

#ifdef ASSERT
  // Narrow receiver type when it is too broad for the method being parsed.
  if (!method()->is_static()) {
    ciInstanceKlass* callee_holder = method()->holder();
    const Type* holder_type = TypeInstPtr::make(TypePtr::BotPTR, callee_holder, Type::trust_interfaces);

    Node* receiver_obj = local(0);
    const TypeInstPtr* receiver_type = _gvn.type(receiver_obj)->isa_instptr();

    if (receiver_type != nullptr && !receiver_type->higher_equal(holder_type)) {
      // Receiver should always be a subtype of callee holder.
      // But, since C2 type system doesn't properly track interfaces,
      // the invariant can't be expressed in the type system for default methods.
      // Example: for unrelated C <: I and D <: I, (C `meet` D) = Object </: I.
      assert(callee_holder->is_interface(), "missing subtype check");

      // Perform dynamic receiver subtype check against callee holder class w/ a halt on failure.
      Node* holder_klass = _gvn.makecon(TypeKlassPtr::make(callee_holder, Type::trust_interfaces));
      Node* not_subtype_ctrl = gen_subtype_check(receiver_obj, holder_klass);
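      // gen_subtype_check returns the control path taken when the receiver is
      // not a subtype; the fall-through control continues as the subtype case.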
      assert(!stopped(), "not a subtype");
// ...
      // to produce successors for trapping blocks.
      int trap_index = block()->flow()->trap_index();
      assert(trap_index != 0, "trap index must be valid");
      uncommon_trap(trap_index);
      break;
    }

    NOT_PRODUCT( parse_histogram()->set_initial_state(bc()); );

#ifdef ASSERT
    int pre_bc_sp = sp();
    int inputs, depth;
    bool have_se = !stopped() && compute_stack_effects(inputs, depth);
    assert(!have_se || pre_bc_sp >= inputs, "have enough stack to execute this BC: pre_bc_sp=%d, inputs=%d", pre_bc_sp, inputs);
#endif //ASSERT

    do_one_bytecode();
    if (failing()) return;

    assert(!have_se || stopped() || failing() || (sp() - pre_bc_sp) == depth,
           "incorrect depth prediction: bc=%s bci=%d, sp=%d, pre_bc_sp=%d, depth=%d", Bytecodes::name(bc()), bci(), sp(), pre_bc_sp, depth);

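    // Route any exceptions raised by this bytecode to their handler blocks, or
    // rethrow them to the caller if no handler covers this bci.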
    do_exceptions();

    NOT_PRODUCT( parse_histogram()->record_change(); );

    if (log != nullptr)
      log->clear_context();  // skip marker if nothing was printed

    // Fall into next bytecode. Each bytecode normally has 1 sequential
    // successor which is typically made ready by visiting this bytecode.
    // If the successor has several predecessors, then it is a merge
    // point, starts a new basic block, and is handled like other basic blocks.
  }
}


//------------------------------set_parse_bci---------------------------------
void Parse::set_parse_bci(int bci) {
  set_bci(bci);
  Node_Notes* nn = C->default_node_notes();
// ...

    Node* fast_io  = call->in(TypeFunc::I_O);
    Node* fast_mem = call->in(TypeFunc::Memory);
    // These two phis are pre-filled with copies of the fast IO and Memory
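    // PhiNode::make sets every input of each phi to that value, so only the
    // slow-path edge (req 2) needs to be wired in explicitly below.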
    Node* io_phi   = PhiNode::make(result_rgn, fast_io,  Type::ABIO);
    Node* mem_phi  = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);

    result_rgn->init_req(2, control());
    io_phi    ->init_req(2, i_o());
    mem_phi   ->init_req(2, reset_memory());

    set_all_memory( _gvn.transform(mem_phi) );
    set_i_o(        _gvn.transform(io_phi) );
  }

  set_control( _gvn.transform(result_rgn) );
}

// Add check to deoptimize once holder klass is fully initialized.
void Parse::clinit_deopt() {
  if (method()->holder()->is_initialized()) {
    return; // in case do_clinit_barriers() is true
  }
  assert(C->has_method(), "only for normal compilations");
  assert(depth() == 1, "only for main compiled method");
  assert(is_normal_parse(), "no barrier needed on osr entry");
  assert(!method()->holder()->is_not_initialized(), "initialization should have been started");

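  // Anchor the guard's debug info at bci 0 so a deoptimization here re-enters
  // the method from the beginning in the interpreter.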
  set_parse_bci(0);

  Node* holder = makecon(TypeKlassPtr::make(method()->holder(), Type::trust_interfaces));
  guard_klass_being_initialized(holder);
}

//------------------------------return_current---------------------------------
// Append current _map to _exit_return
void Parse::return_current(Node* value) {
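  // Returning from Object.<init>: the freshly constructed receiver may need to
  // be registered for finalization.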
  if (method()->intrinsic_id() == vmIntrinsics::_Object_init) {
    call_register_finalizer();
  }

  // Do not set_parse_bci, so that return goo is credited to the return insn.
  set_bci(InvocationEntryBci);