1280 #ifndef PRODUCT
1281 if (PrintEliminateAllocations) {
1282 tty->print("NotUsed ");
1283 Node* res = alloc->proj_out_or_null(TypeFunc::Parms);
1284 if (res != nullptr) {
1285 res->dump();
1286 } else {
1287 alloc->dump();
1288 }
1289 }
1290 #endif
1291 yank_alloc_node(alloc);
1292 return;
1293 }
1294 }
1295
1296 enum { too_big_or_final_path = 1, need_gc_path = 2 };
1297 Node *slow_region = nullptr;
1298 Node *toobig_false = ctrl;
1299
1300 // generate the initial test if necessary
1301 if (initial_slow_test != nullptr ) {
1302 assert (expand_fast_path, "Only need test if there is a fast path");
1303 slow_region = new RegionNode(3);
1304
1305 // Now make the initial failure test. Usually a too-big test but
1306 // might be a TRUE for finalizers or a fancy class check for
1307 // newInstance0.
1308 IfNode *toobig_iff = new IfNode(ctrl, initial_slow_test, PROB_MIN, COUNT_UNKNOWN);
1309 transform_later(toobig_iff);
1310 // Plug the failing-too-big test into the slow-path region
1311 Node *toobig_true = new IfTrueNode( toobig_iff );
1312 transform_later(toobig_true);
1313 slow_region ->init_req( too_big_or_final_path, toobig_true );
1314 toobig_false = new IfFalseNode( toobig_iff );
1315 transform_later(toobig_false);
1316 } else {
1317 // No initial test, just fall into next case
1318 assert(allocation_has_use || !expand_fast_path, "Should already have been handled");
1319 toobig_false = ctrl;
1933 // transformations (CCP) it may point to only one non escaping object
1934 // (but still using phi), corresponding locks and unlocks will be marked
1935 // for elimination. Later obj could be replaced with a new node (new phi)
1936 // which does not have escape information. And later after some graph
1937 // reshape other locks and unlocks (which were not marked for elimination
1938 // before) are connected to this new obj (phi) but they still will not be
1939 // marked for elimination since new obj has no escape information.
1940 // Mark all associated (same box and obj) lock and unlock nodes for
1941 // elimination if some of them are marked already.
1942 void PhaseMacroExpand::mark_eliminated_box(Node* oldbox, Node* obj) {
1943 if (oldbox->as_BoxLock()->is_eliminated()) {
1944 return; // This BoxLock node was processed already.
1945 }
1946 // New implementation (EliminateNestedLocks) has separate BoxLock
1947 // node for each locked region so mark all associated locks/unlocks as
1948 // eliminated even if different objects are referenced in one locked region
1949 // (for example, OSR compilation of nested loop inside locked scope).
1950 if (EliminateNestedLocks ||
1951 oldbox->as_BoxLock()->is_simple_lock_region(nullptr, obj, nullptr)) {
1952 // Box is used only in one lock region. Mark this box as eliminated.
1953 _igvn.hash_delete(oldbox);
1954 oldbox->as_BoxLock()->set_eliminated(); // This changes box's hash value
1955 _igvn.hash_insert(oldbox);
1956
1957 for (uint i = 0; i < oldbox->outcnt(); i++) {
1958 Node* u = oldbox->raw_out(i);
1959 if (u->is_AbstractLock() && !u->as_AbstractLock()->is_non_esc_obj()) {
1960 AbstractLockNode* alock = u->as_AbstractLock();
1961 // Check lock's box since box could be referenced by Lock's debug info.
1962 if (alock->box_node() == oldbox) {
1963 // Mark eliminated all related locks and unlocks.
1964 #ifdef ASSERT
1965 alock->log_lock_optimization(C, "eliminate_lock_set_non_esc4");
1966 #endif
1967 alock->set_non_esc_obj();
1968 }
1969 }
1970 }
1971 return;
1972 }
1973
1974 // Create new "eliminated" BoxLock node and use it in monitor debug info
1975 // instead of oldbox for the same object.
1976 BoxLockNode* newbox = oldbox->clone()->as_BoxLock();
1977
1978 // Note: BoxLock node is marked eliminated only here and it is used
1979 // to indicate that all associated lock and unlock nodes are marked
1980 // for elimination.
1981 newbox->set_eliminated();
1982 transform_later(newbox);
1983
1984 // Replace old box node with new box for all users of the same object.
1985 for (uint i = 0; i < oldbox->outcnt();) {
1986 bool next_edge = true;
1987
1988 Node* u = oldbox->raw_out(i);
1989 if (u->is_AbstractLock()) {
1990 AbstractLockNode* alock = u->as_AbstractLock();
2095 }
2096 }
2097 }
2098 }
2099
2100 // Once we have determined that this lock/unlock can be eliminated, we simply
2101 // eliminate the node without expanding it.
2102 //
2103 // Note: The membars associated with the lock/unlock are currently not
2104 // eliminated. This should be investigated as a future enhancement.
2105 //
2106 bool PhaseMacroExpand::eliminate_locking_node(AbstractLockNode *alock) {
2107
2108 if (!alock->is_eliminated()) {
2109 return false;
2110 }
2111 #ifdef ASSERT
2112 if (!alock->is_coarsened()) {
2113 // Check that new "eliminated" BoxLock node is created.
2114 BoxLockNode* oldbox = alock->box_node()->as_BoxLock();
2115 assert(oldbox->is_eliminated(), "should be done already");
2116 }
2117 #endif
2118
2119 alock->log_lock_optimization(C, "eliminate_lock");
2120
2121 #ifndef PRODUCT
2122 if (PrintEliminateLocks) {
2123 tty->print_cr("++++ Eliminated: %d %s '%s'", alock->_idx, (alock->is_Lock() ? "Lock" : "Unlock"), alock->kind_as_string());
2124 }
2125 #endif
2126
2127 Node* mem = alock->in(TypeFunc::Memory);
2128 Node* ctrl = alock->in(TypeFunc::Control);
2129 guarantee(ctrl != nullptr, "missing control projection, cannot replace_node() with null");
2130
2131 alock->extract_projections(&_callprojs, false /*separate_io_proj*/, false /*do_asserts*/);
2132 // There are 2 projections from the lock. The lock node will
2133 // be deleted when its last use is subsumed below.
2134 assert(alock->outcnt() == 2 &&
2135 _callprojs.fallthrough_proj != nullptr &&
|
1280 #ifndef PRODUCT
1281 if (PrintEliminateAllocations) {
1282 tty->print("NotUsed ");
1283 Node* res = alloc->proj_out_or_null(TypeFunc::Parms);
1284 if (res != nullptr) {
1285 res->dump();
1286 } else {
1287 alloc->dump();
1288 }
1289 }
1290 #endif
1291 yank_alloc_node(alloc);
1292 return;
1293 }
1294 }
1295
1296 enum { too_big_or_final_path = 1, need_gc_path = 2 };
1297 Node *slow_region = nullptr;
1298 Node *toobig_false = ctrl;
1299
1300 if (PEAParanoid && alloc->materialized_cnt() > 0) {
1301 fatal("[PEA] Expanding obj#%d which has been materialized.", alloc->_idx);
1302 }
1303
1304 // generate the initial test if necessary
1305 if (initial_slow_test != nullptr ) {
1306 assert (expand_fast_path, "Only need test if there is a fast path");
1307 slow_region = new RegionNode(3);
1308
1309 // Now make the initial failure test. Usually a too-big test but
1310 // might be a TRUE for finalizers or a fancy class check for
1311 // newInstance0.
1312 IfNode *toobig_iff = new IfNode(ctrl, initial_slow_test, PROB_MIN, COUNT_UNKNOWN);
1313 transform_later(toobig_iff);
1314 // Plug the failing-too-big test into the slow-path region
1315 Node *toobig_true = new IfTrueNode( toobig_iff );
1316 transform_later(toobig_true);
1317 slow_region ->init_req( too_big_or_final_path, toobig_true );
1318 toobig_false = new IfFalseNode( toobig_iff );
1319 transform_later(toobig_false);
1320 } else {
1321 // No initial test, just fall into next case
1322 assert(allocation_has_use || !expand_fast_path, "Should already have been handled");
1323 toobig_false = ctrl;
1937 // transformations (CCP) it may point to only one non escaping object
1938 // (but still using phi), corresponding locks and unlocks will be marked
1939 // for elimination. Later obj could be replaced with a new node (new phi)
1940 // which does not have escape information. And later after some graph
1941 // reshape other locks and unlocks (which were not marked for elimination
1942 // before) are connected to this new obj (phi) but they still will not be
1943 // marked for elimination since new obj has no escape information.
1944 // Mark all associated (same box and obj) lock and unlock nodes for
1945 // elimination if some of them are marked already.
1946 void PhaseMacroExpand::mark_eliminated_box(Node* oldbox, Node* obj) {
1947 if (oldbox->as_BoxLock()->is_eliminated()) {
1948 return; // This BoxLock node was processed already.
1949 }
1950 // New implementation (EliminateNestedLocks) has separate BoxLock
1951 // node for each locked region so mark all associated locks/unlocks as
1952 // eliminated even if different objects are referenced in one locked region
1953 // (for example, OSR compilation of nested loop inside locked scope).
1954 if (EliminateNestedLocks ||
1955 oldbox->as_BoxLock()->is_simple_lock_region(nullptr, obj, nullptr)) {
1956 // Box is used only in one lock region. Mark this box as eliminated.
1957 int locks = 0;
1958 for (uint i = 0; i < oldbox->outcnt(); i++) {
1959 Node* u = oldbox->raw_out(i);
1960 if (u->is_AbstractLock() && !u->as_AbstractLock()->is_non_esc_obj()) {
1961 AbstractLockNode* alock = u->as_AbstractLock();
1962 // Check lock's box since box could be referenced by Lock's debug info.
1963 if (alock->box_node() == oldbox) {
1964 locks++;
1965
1966 if (alock->obj_node() == obj) {
1967 // Mark eliminated all related locks and unlocks.
1968 #ifdef ASSERT
1969 alock->log_lock_optimization(C, "eliminate_lock_set_non_esc4");
1970 #endif
1971 alock->set_non_esc_obj();
1972 locks--;
1973 }
1974 }
1975 }
1976 }
1977 if (locks == 0) {
1978 _igvn.hash_delete(oldbox);
1979 oldbox->as_BoxLock()->set_eliminated(); // This changes box's hash value
1980 _igvn.hash_insert(oldbox);
1981 }
1982
1983 return;
1984 }
1985
1986 // Create new "eliminated" BoxLock node and use it in monitor debug info
1987 // instead of oldbox for the same object.
1988 BoxLockNode* newbox = oldbox->clone()->as_BoxLock();
1989
1990 // Note: BoxLock node is marked eliminated only here and it is used
1991 // to indicate that all associated lock and unlock nodes are marked
1992 // for elimination.
1993 newbox->set_eliminated();
1994 transform_later(newbox);
1995
1996 // Replace old box node with new box for all users of the same object.
1997 for (uint i = 0; i < oldbox->outcnt();) {
1998 bool next_edge = true;
1999
2000 Node* u = oldbox->raw_out(i);
2001 if (u->is_AbstractLock()) {
2002 AbstractLockNode* alock = u->as_AbstractLock();
2107 }
2108 }
2109 }
2110 }
2111
2112 // Once we have determined that this lock/unlock can be eliminated, we simply
2113 // eliminate the node without expanding it.
2114 //
2115 // Note: The membars associated with the lock/unlock are currently not
2116 // eliminated. This should be investigated as a future enhancement.
2117 //
2118 bool PhaseMacroExpand::eliminate_locking_node(AbstractLockNode *alock) {
2119
2120 if (!alock->is_eliminated()) {
2121 return false;
2122 }
2123 #ifdef ASSERT
2124 if (!alock->is_coarsened()) {
2125 // Check that new "eliminated" BoxLock node is created.
2126 BoxLockNode* oldbox = alock->box_node()->as_BoxLock();
2127 assert(oldbox->is_eliminated() || DoPartialEscapeAnalysis, "should be done already");
2128 }
2129 #endif
2130
2131 alock->log_lock_optimization(C, "eliminate_lock");
2132
2133 #ifndef PRODUCT
2134 if (PrintEliminateLocks) {
2135 tty->print_cr("++++ Eliminated: %d %s '%s'", alock->_idx, (alock->is_Lock() ? "Lock" : "Unlock"), alock->kind_as_string());
2136 }
2137 #endif
2138
2139 Node* mem = alock->in(TypeFunc::Memory);
2140 Node* ctrl = alock->in(TypeFunc::Control);
2141 guarantee(ctrl != nullptr, "missing control projection, cannot replace_node() with null");
2142
2143 alock->extract_projections(&_callprojs, false /*separate_io_proj*/, false /*do_asserts*/);
2144 // There are 2 projections from the lock. The lock node will
2145 // be deleted when its last use is subsumed below.
2146 assert(alock->outcnt() == 2 &&
2147 _callprojs.fallthrough_proj != nullptr &&
|