464 }
465 }
466
467 // Propagate GlobalEscape and ArgEscape escape states to all nodes
468 // and check that we still have non-escaping java objects.
// NOTE(review): presumably returns false when no non-escaping objects remain,
// allowing the caller to bail out of escape analysis early — confirm against
// the out-of-line definition.
469 bool find_non_escaped_objects(GrowableArray<PointsToNode*>& ptnodes_worklist,
470 GrowableArray<JavaObjectNode*>& non_escaped_worklist);
471
472 // Adjust scalar_replaceable state after Connection Graph is built.
// 'reducible_merges' is passed by reference, so this call may update the set
// of Phi merges still considered reducible — verify against definition.
473 void adjust_scalar_replaceable_state(JavaObjectNode* jobj, Unique_Node_List &reducible_merges);
474
475 // Reevaluate Phis reducible status after 'obj' became NSR.
// (NSR = Not Scalar Replaceable; see find_scalar_replaceable_allocs below.)
476 void revisit_reducible_phi_status(JavaObjectNode* jobj, Unique_Node_List& reducible_merges);
477
478 // Propagate NSR (Not scalar replaceable) state.
479 void find_scalar_replaceable_allocs(GrowableArray<JavaObjectNode*>& jobj_worklist, Unique_Node_List &reducible_merges);
480
481 // Optimize ideal graph.
482 void optimize_ideal_graph(GrowableArray<Node*>& ptr_cmp_worklist,
483 GrowableArray<MemBarStoreStoreNode*>& storestore_worklist);
484 // Optimize objects compare.
// Returns a constant TypeInt when the pointer compare of 'left' and 'right'
// can be statically decided from escape information — presumably; confirm
// with the definition before relying on this.
485 const TypeInt* optimize_ptr_compare(Node* left, Node* right);
486
487 // Returns unique corresponding java object or null.
488 JavaObjectNode* unique_java_object(Node *n) const;
489
490 // Add an edge of the specified type pointing to the specified target.
491 bool add_edge(PointsToNode* from, PointsToNode* to) {
492 assert(!from->is_Field() || from->as_Field()->is_oop(), "sanity");
493
494 if (to == phantom_obj) {
495 if (from->has_unknown_ptr()) {
496 return false; // already points to phantom_obj
497 }
498 from->set_has_unknown_ptr();
499 }
500
501 bool is_new = from->add_edge(to);
502 assert(to != phantom_obj || is_new, "sanity");
503 if (is_new) { // New edge?
|
464 }
465 }
466
467 // Propagate GlobalEscape and ArgEscape escape states to all nodes
468 // and check that we still have non-escaping java objects.
// NOTE(review): presumably returns false when no non-escaping objects remain,
// allowing the caller to bail out of escape analysis early — confirm against
// the out-of-line definition.
469 bool find_non_escaped_objects(GrowableArray<PointsToNode*>& ptnodes_worklist,
470 GrowableArray<JavaObjectNode*>& non_escaped_worklist);
471
472 // Adjust scalar_replaceable state after Connection Graph is built.
// 'reducible_merges' is passed by reference, so this call may update the set
// of Phi merges still considered reducible — verify against definition.
473 void adjust_scalar_replaceable_state(JavaObjectNode* jobj, Unique_Node_List &reducible_merges);
474
475 // Reevaluate Phis reducible status after 'obj' became NSR.
// (NSR = Not Scalar Replaceable; see find_scalar_replaceable_allocs below.)
476 void revisit_reducible_phi_status(JavaObjectNode* jobj, Unique_Node_List& reducible_merges);
477
478 // Propagate NSR (Not scalar replaceable) state.
479 void find_scalar_replaceable_allocs(GrowableArray<JavaObjectNode*>& jobj_worklist, Unique_Node_List &reducible_merges);
480
481 // Optimize ideal graph.
482 void optimize_ideal_graph(GrowableArray<Node*>& ptr_cmp_worklist,
483 GrowableArray<MemBarStoreStoreNode*>& storestore_worklist);
484 // Expand flat accesses to accesses to each component if the object does not escape
// Operates on the safepoints collected in 'sfn_worklist'; exact expansion
// strategy is defined out-of-line — see definition.
485 void optimize_flat_accesses(GrowableArray<SafePointNode*>& sfn_worklist);
486 // Optimize objects compare.
// Returns a constant TypeInt when the pointer compare of 'left' and 'right'
// can be statically decided from escape information — presumably; confirm
// with the definition before relying on this.
487 const TypeInt* optimize_ptr_compare(Node* left, Node* right);
488
489 // Returns unique corresponding java object or null.
490 JavaObjectNode* unique_java_object(Node *n) const;
491
492 // Add an edge of the specified type pointing to the specified target.
493 bool add_edge(PointsToNode* from, PointsToNode* to) {
494 assert(!from->is_Field() || from->as_Field()->is_oop(), "sanity");
495
496 if (to == phantom_obj) {
497 if (from->has_unknown_ptr()) {
498 return false; // already points to phantom_obj
499 }
500 from->set_has_unknown_ptr();
501 }
502
503 bool is_new = from->add_edge(to);
504 assert(to != phantom_obj || is_new, "sanity");
505 if (is_new) { // New edge?
|