17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "ci/ciSymbols.hpp"
27 #include "compiler/compileLog.hpp"
28 #include "oops/objArrayKlass.hpp"
29 #include "opto/addnode.hpp"
30 #include "opto/memnode.hpp"
31 #include "opto/mulnode.hpp"
32 #include "opto/parse.hpp"
33 #include "opto/rootnode.hpp"
34 #include "opto/runtime.hpp"
35 #include "runtime/sharedRuntime.hpp"
36
37 //------------------------------make_dtrace_method_entry_exit ----------------
38 // Dtrace -- record entry or exit of a method if compiled with dtrace support
39 void GraphKit::make_dtrace_method_entry_exit(ciMethod* method, bool is_entry) {
40 const TypeFunc *call_type = OptoRuntime::dtrace_method_entry_exit_Type();
41 address call_address = is_entry ? CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry) :
42 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit);
43 const char *call_name = is_entry ? "dtrace_method_entry" : "dtrace_method_exit";
44
45 // Get base of thread-local storage area
46 Node* thread = _gvn.transform( new ThreadLocalNode() );
47
48 // Get method
49 const TypePtr* method_type = TypeMetadataPtr::make(method);
50 Node *method_node = _gvn.transform(ConNode::make(method_type));
51
52 kill_dead_locals();
53
54 // For some reason, this call reads only raw memory.
55 const TypePtr* raw_adr_type = TypeRawPtr::BOTTOM;
56 make_runtime_call(RC_LEAF | RC_NARROW_MEM,
263 }
264
265 Node* kls = makecon(TypeKlassPtr::make(klass));
266 Node* obj = new_instance(kls);
267
268 // Push resultant oop onto stack
269 push(obj);
270
271 // Keep track of whether opportunities exist for StringBuilder
272 // optimizations.
273 if (OptimizeStringConcat &&
274 (klass == C->env()->StringBuilder_klass() ||
275 klass == C->env()->StringBuffer_klass())) {
276 C->set_has_stringbuilder(true);
277 }
278
279 // Keep track of boxed values for EliminateAutoBox optimizations.
280 if (C->eliminate_boxing() && klass->is_box_klass()) {
281 C->set_has_boxed_value(true);
282 }
283 }
284
285 #ifndef PRODUCT
286 //------------------------------dump_map_adr_mem-------------------------------
287 // Debug dump of the mapping from address types to MergeMemNode indices.
288 void Parse::dump_map_adr_mem() const {
289 tty->print_cr("--- Mapping from address types to memory Nodes ---");
290 MergeMemNode *mem = map() == nullptr ? nullptr : (map()->memory()->is_MergeMem() ?
291 map()->memory()->as_MergeMem() : nullptr);
292 for (uint i = 0; i < (uint)C->num_alias_types(); i++) {
293 C->alias_type(i)->print_on(tty);
294 tty->print("\t");
295 // Node mapping, if any
296 if (mem && i < mem->req() && mem->in(i) && mem->in(i) != mem->empty_memory()) {
297 mem->in(i)->dump();
298 } else {
299 tty->cr();
300 }
301 }
302 }
303
304 #endif
|
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "ci/ciSymbols.hpp"
27 #include "compiler/compileLog.hpp"
28 #include "oops/objArrayKlass.hpp"
29 #include "opto/addnode.hpp"
30 #include "opto/memnode.hpp"
31 #include "opto/mulnode.hpp"
32 #include "opto/parse.hpp"
33 #include "opto/rootnode.hpp"
34 #include "opto/runtime.hpp"
35 #include "runtime/sharedRuntime.hpp"
36
37 #ifndef PRODUCT
// PEA statistics counters (non-PRODUCT builds only). Bumped with Atomic::inc
// from compiler threads; see PEAState::add_new_allocation and
// PEAState::materialize.
unsigned peaNumAllocsTracked = 0;
unsigned peaNumMaterializations = 0;

// Print the PEA counters to tty.
// NOTE(review): the caller is not visible in this file -- presumably invoked
// at VM shutdown when statistics are printed; confirm against the caller.
void printPeaStatistics() {
  tty->print("PEA: ");
  tty->print("num allocations tracked = %u, ", peaNumAllocsTracked);
  tty->print_cr("num materializations = %u", peaNumMaterializations);
}
46 #endif
47
48 //------------------------------make_dtrace_method_entry_exit ----------------
49 // Dtrace -- record entry or exit of a method if compiled with dtrace support
50 void GraphKit::make_dtrace_method_entry_exit(ciMethod* method, bool is_entry) {
51 const TypeFunc *call_type = OptoRuntime::dtrace_method_entry_exit_Type();
52 address call_address = is_entry ? CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry) :
53 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit);
54 const char *call_name = is_entry ? "dtrace_method_entry" : "dtrace_method_exit";
55
56 // Get base of thread-local storage area
57 Node* thread = _gvn.transform( new ThreadLocalNode() );
58
59 // Get method
60 const TypePtr* method_type = TypeMetadataPtr::make(method);
61 Node *method_node = _gvn.transform(ConNode::make(method_type));
62
63 kill_dead_locals();
64
65 // For some reason, this call reads only raw memory.
66 const TypePtr* raw_adr_type = TypeRawPtr::BOTTOM;
67 make_runtime_call(RC_LEAF | RC_NARROW_MEM,
274 }
275
276 Node* kls = makecon(TypeKlassPtr::make(klass));
277 Node* obj = new_instance(kls);
278
279 // Push resultant oop onto stack
280 push(obj);
281
282 // Keep track of whether opportunities exist for StringBuilder
283 // optimizations.
284 if (OptimizeStringConcat &&
285 (klass == C->env()->StringBuilder_klass() ||
286 klass == C->env()->StringBuffer_klass())) {
287 C->set_has_stringbuilder(true);
288 }
289
290 // Keep track of boxed values for EliminateAutoBox optimizations.
291 if (C->eliminate_boxing() && klass->is_box_klass()) {
292 C->set_has_boxed_value(true);
293 }
294
295 if (DoPartialEscapeAnalysis) {
296 // obj is a CheckCastPP Node, aka. cooked oop.
297 jvms()->alloc_state().add_new_allocation(this, obj);
298 }
299 }
300
301 #ifndef PRODUCT
302 //------------------------------dump_map_adr_mem-------------------------------
303 // Debug dump of the mapping from address types to MergeMemNode indices.
// Debug dump of the mapping from alias (address) types to the memory Node of
// each alias slice in the current map's MergeMem, one line per alias type.
void Parse::dump_map_adr_mem() const {
  tty->print_cr("--- Mapping from address types to memory Nodes ---");
  // Only a MergeMemNode has per-slice inputs; otherwise leave 'mem' null and
  // print alias types without Node mappings.
  MergeMemNode *mem = map() == nullptr ? nullptr : (map()->memory()->is_MergeMem() ?
                                                    map()->memory()->as_MergeMem() : nullptr);
  for (uint i = 0; i < (uint)C->num_alias_types(); i++) {
    C->alias_type(i)->print_on(tty);
    tty->print("\t");
    // Node mapping, if any (skip the slice if it is just the empty memory
    // placeholder).
    if (mem && i < mem->req() && mem->in(i) && mem->in(i) != mem->empty_memory()) {
      mem->in(i)->dump();
    } else {
      tty->cr();
    }
  }
}
319
320 #endif
321
322 #include "ci/ciUtilities.inline.hpp"
323 #include "compiler/methodMatcher.hpp"
324
325 class PEAContext {
326 private:
327 BasicMatcher* _matcher;
328
329 PEAContext() {
330 if (PEAMethodOnly != nullptr) {
331 const char* error_msg = nullptr;
332 _matcher = BasicMatcher::parse_method_pattern((char*)PEAMethodOnly, error_msg, false);
333 if (error_msg != nullptr) {
334 tty->print_cr("Invalid PEAMethodOnly: %s", error_msg);
335 }
336 }
337 }
338
339 NONCOPYABLE(PEAContext);
340 public:
341 bool match(ciMethod* method) const;
342 // mayer's singleton.
343 static PEAContext& instance() {
344 static PEAContext s;
345 return s;
346 }
347 };
348
349 //
350 // Partial Escape Analysis
351 // Stadler, Lukas, Thomas Würthinger, and Hanspeter Mössenböck. "Partial escape analysis and scalar replacement for Java."
352 //
353 // Our adaption to C2.
354 // https://gist.github.com/navyxliu/62a510a5c6b0245164569745d758935b
355 //
356
357 VirtualState::VirtualState(const TypeOopPtr* oop_type): _oop_type(oop_type), _lockcnt(0) {
358 Compile* C = Compile::current();
359 int nof = nfields();
360 _entries = NEW_ARENA_ARRAY(C->parser_arena(), Node*, nof);
361 // only track explicit stores.
362 // see IntializeNode semantics in memnode.cpp
363 for (int i = 0; i < nof; ++i) {
364 _entries[i] = nullptr;
365 }
366 }
367
368 // do NOT call base's copy constructor. we would like to reset refcnt!
369 VirtualState::VirtualState(const VirtualState& other) : _oop_type(other._oop_type), _lockcnt(other._lockcnt) {
370 int nof = nfields();
371 _entries = NEW_ARENA_ARRAY(Compile::current()->parser_arena(), Node*, nof);
372
373 // Using arraycopy stub is more efficient?
374 Node** dst = _entries;
375 Node** src = other._entries;
376 while (nof-- > 0) {
377 *dst++ = *src++;
378 }
379 }
380
381 int VirtualState::nfields() const {
382 ciInstanceKlass* holder = _oop_type->is_instptr()->instance_klass();
383 return holder->nof_nonstatic_fields();
384 }
385
386 void VirtualState::set_field(ciField* field, Node* val) {
387 // We can't trust field->holder() here. It may reference to the super class.
388 // field layouter may flip order in jdk15+, refer to:
389 // https://shipilev.net/jvm/objects-inside-out/#_superhierarchy_gaps_in_java_15
390 //
391 // _oop_type is the exact type when we registered ObjID in allocation state.
392 //
393 ciInstanceKlass* holder = _oop_type->is_instptr()->instance_klass();
394
395 for (int i = 0; i < holder->nof_nonstatic_fields(); ++i) {
396 if (field->offset_in_bytes() == holder->nonstatic_field_at(i)->offset_in_bytes()) {
397 _entries[i] = val;
398 return;
399 }
400 }
401
402 ShouldNotReachHere();
403 }
404
405 Node* VirtualState::get_field(ciField* field) const {
406 ciInstanceKlass* holder = _oop_type->is_instptr()->instance_klass();
407
408 for (int i = 0; i < holder->nof_nonstatic_fields(); ++i) {
409 if (field->offset_in_bytes() == holder->nonstatic_field_at(i)->offset_in_bytes()) {
410 return _entries[i];
411 }
412 }
413
414 ShouldNotReachHere();
415 return nullptr;
416 }
417
418 static void ensure_phi(PhiNode* phi, uint pnum) {
419 while (phi->req() <= pnum) {
420 phi->add_req(nullptr);
421 }
422 }
423
424 static const Type* initialize_null_field(GraphKit* kit, ciField* field, Node*& val) {
425 assert(val == nullptr, "must been a null field");
426 const Type* type;
427 BasicType bt = field->layout_type();
428
429 if (bt == T_OBJECT && field->type()->is_instance_klass()) {
430 val = kit->gvn().makecon(TypePtr::NULL_PTR);
431 type = TypeInstPtr::make(TypePtr::BotPTR, field->type()->as_instance_klass());
432 } else {
433 val = kit->zerocon(bt);
434 type = Type::get_const_basic_type(bt);
435 }
436
437 return type;
438 }
439
// Merge another VirtualState ('newin', flowing in along predecessor 'pnum' of
// region 'r') into this one, field by field. Where the two states disagree on
// a field value, a PhiNode rooted at 'r' is created (or extended) to select
// the per-path value. Returns *this.
ObjectState& VirtualState::merge(ObjectState* newin, GraphKit* kit, RegionNode* r, int pnum) {
  assert(newin->is_virtual(), "only support VirtualState");

  if (this != newin) {
    VirtualState* vs = static_cast<VirtualState*>(newin);
    ciInstanceKlass* ik = _oop_type->is_instptr()->instance_klass();
    assert(nfields() == ik->nof_nonstatic_fields(), "_nfields should be consistent with instanceKlass");

    for (int i = 0; i < nfields(); ++i) {
      Node* m = _entries[i];

      // Only fields whose values differ across the two paths need a phi.
      if (m != vs->_entries[i]) {
        ciField* field = ik->nonstatic_field_at(i);
        Node* n = vs->_entries[i];
        const Type* tn;
        if (n == nullptr) {
          // No tracked store on the incoming path: use the field's default.
          tn = initialize_null_field(kit, field, n);
        } else {
          tn = kit->gvn().type(n);
        }

        // If 'm' is not already a phi owned by this merge region, wrap it in
        // one whose type is the meet of both incoming types.
        if (m == nullptr || !m->is_Phi() || m->in(0) != r) {
          const Type* type;

          if (m == nullptr) {
            type = initialize_null_field(kit, field, m);
          } else {
            type = kit->gvn().type(m);
          }
          type = type->meet(tn);

          m = PhiNode::make(r, m, type);
          kit->gvn().set_type(m, type);
          _entries[i] = m;
        }

        ensure_phi(m->as_Phi(), pnum);
        m->set_req(pnum, n);
        if (pnum == 1) {
          // First real predecessor: let GVN fold the phi if it is degenerate.
          _entries[i] = kit->gvn().transform(m);
        }
      }
    }
  }

  return *this;
}
487
488 #ifndef PRODUCT
// Debug dump of this virtual object's tracked field values, one per line;
// '_' marks a field with no tracked store.
// NOTE(review): Node::dump() writes to tty regardless of 'os'; only the
// header lines honor the requested stream. Verify whether 'os' is ever
// anything other than tty.
void VirtualState::print_on(outputStream* os) const {
  os->print_cr("Virt = %p", this);

  for (int i = 0; i < nfields(); ++i) {
    Node* val = _entries[i];
    os->print("#%d: ", i);
    if (val != nullptr) {
      val->dump();
    } else {
      os->print_cr("_");
    }
  }
}
502
// Debug dump of an escaped object's state; '_materialized' records whether a
// clone was actually emitted.
// NOTE(review): _merged_value->dump() writes to tty regardless of 'os'; only
// the header line honors the requested stream.
void EscapedState::print_on(outputStream* os) const {
  os->print_cr("Escaped = %p %d", this, _materialized);
  if (_merged_value == nullptr) {
    os->print_cr(" null");
  } else {
    _merged_value->dump();
  }
}
511
512 #endif
513
// Start tracking a freshly parsed allocation as a virtual object.
// 'obj' is the cooked oop (a CheckCastPP over the AllocateNode's result
// projection). Several opt-out rules make this a no-op: over-long or
// unknown-length arrays, Throwable/Thread/Reference/StringBuffer/
// StringBuilder subclasses, non-instantiable klasses, finalizers, the
// PEA_debug_* filters, and the PEAMethodOnly method filter.
void PEAState::add_new_allocation(GraphKit* kit, Node* obj) {
  PartialEscapeAnalysis* pea = kit->PEA();
  int nfields;
  const TypeOopPtr* oop_type = obj->as_Type()->type()->is_oopptr();

  if (oop_type->isa_aryptr()) {
    const TypeAryPtr* ary_type = oop_type->is_aryptr();
    const TypeInt* size = ary_type->size();
    if (size->is_con() && size->get_con() <= EliminateAllocationArraySizeLimit) {
      nfields = size->get_con();
    } else {
      // length of array is too long or unknown
      return;
    }
  } else {
    const TypeInstPtr* inst_type = oop_type->is_instptr();
    nfields = inst_type->instance_klass()->nof_nonstatic_fields();
  }

  // NOTE(review): the code below unconditionally treats oop_type as an
  // instptr (is_instptr() on the 'ik' line), so an array that survives the
  // size check above would trip that query -- presumably arrays never reach
  // this point in practice; confirm against the callers.
  if (nfields >= 0) {
    AllocateNode* alloc = obj->in(1)->in(0)->as_Allocate();
    int idx = pea->add_object(alloc);
#ifndef PRODUCT
    // node_idx_t is unsigned. Use static_cast<> here to avoid comparison between signed and unsigned.
    if (PEA_debug_idx > 0 && alloc->_idx != static_cast<node_idx_t>(PEA_debug_idx)) { // only allow PEA_debug_idx
      return;
    } else if (PEA_debug_idx < 0 && alloc->_idx == static_cast<node_idx_t>(-PEA_debug_idx)) { // block PEA_debug_idx
      return;
    }
    Atomic::inc(&peaNumAllocsTracked);
#endif
    // Opt out all subclasses of Throwable because C2 will not inline all methods of them including <init>.
    // PEA needs to materialize it at <init>.
    ciInstanceKlass* ik = oop_type->is_instptr()->instance_klass();
    ciEnv* env = ciEnv::current();
    if (ik->is_subclass_of(env->Throwable_klass())) {
      return;
    }
    // Opt out of all subclasses that non-partial escape analysis opts out of. Opt out of StringBuffer/Builder and
    // defer those objects to StringOpts.
    if (ik->is_subclass_of(env->Thread_klass()) ||
        ik->is_subclass_of(env->Reference_klass()) ||
        ik->is_subclass_of(env->StringBuffer_klass()) ||
        ik->is_subclass_of(env->StringBuilder_klass()) ||
        !ik->can_be_instantiated() || ik->has_finalizer()) {
      return;
    }
    // Debug window: only track objects whose sequence number falls in
    // [PEA_debug_start, PEA_debug_stop).
    if (idx < PEA_debug_start || idx >= PEA_debug_stop) {
      return;
    }

    ciMethod* method = kit->jvms()->method();
    if (PEAContext::instance().match(method)) {
#ifndef PRODUCT
      if (PEAVerbose) {
        if (method != nullptr) {
          method->dump_name_as_ascii(tty);
        }
        tty->print_cr(" start tracking %d | obj#%d", idx, alloc->_idx);
        alloc->dump();
      }
#endif
      // Register the allocation as a virtual object and record the cooked
      // oop as its alias.
      bool result = _state.put(alloc, new VirtualState(oop_type));
      assert(result, "the key existed in _state");
      pea->add_alias(alloc, obj);
    }
  }
}
582
583 PEAState& PEAState::operator=(const PEAState& init) {
584 if (this != &init) {
585 clear();
586
587 init._state.iterate([&](ObjID key, ObjectState* value) {
588 _state.put(key, value->clone());
589 return true;
590 });
591 }
592
593 #ifdef ASSERT
594 validate();
595 #endif
596 return *this;
597 }
598
599 void PEAState::put_field(GraphKit* kit, ciField* field, Node* objx, Node* val) {
600 Compile* C = kit->C;
601 PartialEscapeAnalysis* pea = C->PEA();
602 int offset = field->offset_in_bytes();
603 Node* adr = kit->basic_plus_adr(objx, objx, offset);
604 const TypePtr* adr_type = C->alias_type(field)->adr_type();
605 DecoratorSet decorators = IN_HEAP;
606
607 BasicType bt = field->layout_type();
608 const Type* type = Type::get_const_basic_type(bt);
609 bool is_obj = is_reference_type(bt);
610
611 if (is_obj && pea->is_alias(val)) {
612 // recurse if val is a virtual object.
613 if (as_virtual(pea, val)) {
614 materialize(kit, val);
615 }
616 EscapedState* es = as_escaped(pea, val);
617 assert(es != nullptr, "the object of val is not Escaped");
618 val = es->merged_value();
619 }
620 // Store the value.
621 const Type* field_type;
622 if (!field->type()->is_loaded()) {
623 field_type = TypeInstPtr::BOTTOM;
624 } else {
625 if (is_obj) {
626 field_type = TypeOopPtr::make_from_klass(field->type()->as_klass());
627 } else {
628 field_type = Type::BOTTOM;
629 }
630 }
631 decorators |= field->is_volatile() ? MO_SEQ_CST : MO_UNORDERED;
632
633 #ifndef PRODUCT
634 if (PEAVerbose) {
635 val->dump();
636 }
637 #endif
638 kit->access_store_at(objx, adr, adr_type, val, field_type, bt, decorators);
639 }
640
641 // Because relevant objects may form a directed cyclic graph, materialization is a DFS process.
642 // PEA clones the object and marks escaped in allocation state. PEA then iterates all fields
643 // and recursively materializes the references which are still aliasing with virtual objects in
644 // allocation state.
// Materialize the virtual object aliased by 'var': clone the allocation,
// mark the object Escaped, steal any held monitors from the original object,
// replay tracked field stores onto the clone, and patch other virtual states
// that referenced 'var'. Returns the new (cooked) oop.
Node* PEAState::materialize(GraphKit* kit, Node* var) {
  Compile* C = kit->C;
  PartialEscapeAnalysis* pea = C->PEA();
  ObjID alloc = pea->is_alias(var);
  VirtualState* virt = static_cast<VirtualState*>(get_object_state(alloc));

  assert(alloc != nullptr && get_object_state(alloc)->is_virtual(), "sanity check");
#ifndef PRODUCT
  if (PEAVerbose) {
    tty->print_cr("PEA materializes a virtual %d obj%d ", pea->object_idx(alloc), alloc->_idx);
  }
  Atomic::inc(&peaNumMaterializations);
#endif

  const TypeOopPtr* oop_type = var->as_Type()->type()->is_oopptr();
  Node* objx = kit->materialize_object(alloc, oop_type);

  // we save VirtualState beforehand.
  escape(alloc, objx, true);
  pea->add_alias(alloc, objx);

  // Transfer monitors held on the virtual object to the clone.
  if (virt->lockcnt() > 0 && GenerateSynchronizationCode) {
    if (PEAVerbose) {
      tty->print_cr("materializing an object with unbalanced monitor");
    }

    int mon_id = 0;
    JVMState* jvms = kit->jvms();
    int cnt = 0;
    // It's possible that the locked monitor is not in the youngest JVMState,
    // so we have to follow the stacktrace to discover them all.
    //
    // PEA Materialization steals those monitors from the original object. Here is the scheme:
    // 1. unlock the original object.
    // 2. lock the materialized object.
    // 3. backfill the obj for Monitor 'obj|box' pair when Parse translates 'monitor-exit'.
    // 4. split Phi-Unlock in the upcoming monitor_exit (Parse::do_monitor_exit).
    //
    while (jvms != nullptr) {
      // Find the monitor slot (if any) in this frame that locks 'var'.
      for (mon_id = 0; mon_id < jvms->nof_monitors() && jvms->map()->monitor_obj(jvms, mon_id) != var; ++mon_id);

      if (mon_id < jvms->nof_monitors()) {
        cnt++;
        Node* box = jvms->map()->monitor_box(jvms, mon_id);
        kit->shared_unlock(box, var, true/*preserve_monitor*/); // PEA pops the monitor in Parse::monitor_exit().
        kit->clone_shared_lock(box, objx);
      }
      jvms = jvms->caller();
    }
    assert(cnt == virt->lockcnt(), "steal all locks from var");
  }

  kit->replace_in_map(var, objx);

#ifndef PRODUCT
  if (PEAVerbose) {
    tty->print("new object: ");
    objx->dump();
  }
#endif

  if (oop_type->isa_instptr()) {
    // virt->_oop_type is an exact non-null pointer. oop_type may not be exact, or BOT
    // We check that they both refer to the same java type.
    assert(virt->_oop_type->is_instptr()->is_same_java_type_as(oop_type), "type of oopptr is inconsistent!");
#ifndef PRODUCT
    if (PEAVerbose) {
      ciInstanceKlass* ik = oop_type->is_instptr()->instance_klass();
      tty->print("ciInstanceKlass: ");
      ik->print_name_on(tty);
      tty->cr();
    }
#endif

    // Replay every tracked field store onto the clone.
    for (auto&& it = virt->field_iterator(); it.has_next(); ++it) {
      ciField* field = it.field();
      Node* val = it.value();

#ifndef PRODUCT
      if (PEAVerbose) {
        tty->print("field: ");
        field->print_name_on(tty);
        tty->cr();
      }
#endif
      // no initial value or is captured by InitializeNode
      if (val == nullptr) continue;

      put_field(kit, field, objx, val);
    }

    // back from DFS, we still need to check again for all virtual states.
    // they may have a field 'var' which has committed to memory via prior putfield. We emit a store with updated objx.
    // Hopefully, two consecutive stores coalesce.
    _state.iterate([&](ObjID obj, ObjectState* os) {
      if (os->is_virtual()) {
        VirtualState* vs = static_cast<VirtualState*>(os);

        for (auto&& i = vs->field_iterator(); i.has_next(); ++i) {
          if (i.value() == var) {
            vs->set_field(i.field(), objx);
            put_field(kit, i.field(), get_java_oop(obj), objx);
          }
        }
      }
      return true;
    });

    // if var is associated with MemBarRelease, copy it for objx
    for (DUIterator_Fast kmax, k = var->fast_outs(kmax); k < kmax; k++) {
      Node* use = var->fast_out(k);

      if (use->Opcode() == Op_MemBarRelease) {
        kit->insert_mem_bar(Op_MemBarRelease, objx);
        break;
      }
    }
  } else {
    assert(false, "array not support yet!");
  }

#ifdef ASSERT
  validate();
#endif
  return objx;
}
771
772 #ifndef PRODUCT
773 void PEAState::print_on(outputStream* os) const {
774 if (size() > 0) {
775 os->print_cr("PEAState:");
776 }
777
778 _state.iterate([&](ObjID obj, ObjectState* state) {
779 bool is_virt = state->is_virtual();
780 os->print("Obj#%d(%s) ref = %d\n", obj->_idx, is_virt ? "Virt" : "Mat", state->ref_cnt());
781
782 if (is_virt) {
783 VirtualState* vs = static_cast<VirtualState*>(state);
784 vs->print_on(os);
785 } else {
786 EscapedState* es = static_cast<EscapedState*>(state);
787 es->print_on(tty);
788 }
789 return true;
790 });
791 }
792
793 #endif
794
795 #ifdef ASSERT
// Consistency check of the allocation state (debug builds only).
// Intentionally empty for now; hook for future invariant checks.
void PEAState::validate() const {
}
798 #endif
799
800 bool safepointContains(SafePointNode* sfpt, Node *oop) {
801 for (uint i = TypeFunc::Parms; i < sfpt->req(); ++i) {
802 if (oop == sfpt->in(i)) {
803 return true;
804 }
805 }
806 return false;
807 }
808
// Transitively mark object 'id' (with cooked oop 'obj') and every virtual
// object reachable through its tracked reference fields as escaped. Unlike
// materialize(), no IR is emitted: the escape is recorded with
// materialized = false.
void PEAState::mark_all_escaped(PartialEscapeAnalysis* pea, ObjID id, Node* obj) {
  VirtualState* virt = as_virtual(pea, obj);
  escape(id, obj, false);

  for (auto&& it = virt->field_iterator(); it.has_next(); ++it) {
    ciField* field = it.field();
    Node* val = it.value();

    BasicType bt = field->layout_type();
    bool is_obj = is_reference_type(bt);

    ObjID alias = pea->is_alias(val);
    if (is_obj && alias != nullptr) {
      // recurse if val is a virtual object.
      if (get_object_state(alias)->is_virtual()) {
        mark_all_escaped(pea, alias, val);
      }
      assert(as_escaped(pea, val) != nullptr, "the object of val is not Escaped");
    }
  }
}
830
831 void PEAState::mark_all_live_objects_escaped(PartialEscapeAnalysis *pea, SafePointNode* sfpt) {
832 Unique_Node_List objs;
833 int sz = objects(objs);
834
835 for (int i = 0; i < sz; ++i) {
836 ObjID id = static_cast<ObjID>(objs.at(i));
837 ObjectState* os = get_object_state(id);
838
839 if (os->is_virtual()) {
840 Node *oop = get_java_oop(id);
841 // We only need to mark objects that are live as escaped.
842 if (safepointContains(sfpt, oop)) {
843 mark_all_escaped(pea, id, oop);
844 }
845 }
846 }
847 }
848
849 // get the key set from _state. we stop maintaining aliases for the materialized objects.
850 int PEAState::objects(Unique_Node_List& nodes) const {
851 _state.iterate([&](ObjID obj, ObjectState* state) {
852 nodes.push(obj); return true;
853 });
854 return nodes.size();
855 }
856
857 // We track '_merged_value' along with control-flow but only return it if _materialized = true;
858 // GraphKit::backfill_materialized() replaces the original CheckCastPP with it at do_exits() or at safepoints.
859 // If materialization doesn't take place, replacement shouldn't happen either.
860 //
861 // @return: nullptr if id has not been materialized, or the SSA java_oop that denotes the original object.
862 Node* PEAState::get_materialized_value(ObjID id) const {
863 assert(contains(id), "must exists in allocation");
864 ObjectState* os = get_object_state(id);
865
866 if (os->is_virtual()) {
867 return nullptr;
868 } else {
869 return static_cast<EscapedState*>(os)->materialized_value();
870 }
871 }
872
// Return the SSA java oop for object 'id': the materialized clone if one
// exists, otherwise the unique CheckCastPP (cooked oop) hanging off the
// AllocateNode's result projection. Returns nullptr if 'id' is untracked or
// no cooked oop is found.
Node* PEAState::get_java_oop(ObjID id) const {
  if (!contains(id)) return nullptr;

  Node* obj = get_materialized_value(id);
  if (obj != nullptr) {
    return obj;
  }

  // Fall back to the original allocation's cooked oop: search the users of
  // the Allocate's result projection for the CheckCastPP.
  ProjNode* resproj = id->proj_out_or_null(TypeFunc::Parms);
  if (resproj != nullptr) {
    for (DUIterator_Fast imax, i = resproj->fast_outs(imax); i < imax; i++) {
      Node* p = resproj->fast_out(i);
      if (p->is_CheckCastPP()) {
        assert(obj == nullptr, "multiple CheckCastPP?");
        obj = p;
      }
    }
  }
  assert(obj == nullptr || AllocateNode::Ideal_allocation(obj) == id, "sanity check");
  return obj;
}
894
// Helper that merges predecessor allocation states into 'target' at a
// control-flow merge point; 'target' is kept by reference and updated in place.
AllocationStateMerger::AllocationStateMerger(PEAState& target) : _state(target) {}
896
// Merge the incoming allocation state 'newin' (arriving along predecessor
// 'pnum' of 'region') into _state. Objects tracked by only one side are
// dropped; objects virtual on both sides merge field-wise; otherwise the
// object becomes Escaped on both sides and a phi selects the per-path oop.
void AllocationStateMerger::merge(PEAState& newin, GraphKit* kit, RegionNode* region, int pnum) {
  PartialEscapeAnalysis* pea = kit->PEA();
  Unique_Node_List set1, set2;

  _state.objects(set1);
  newin.objects(set2);

  // Restrict set1 to the objects tracked by BOTH states.
  VectorSet intersection = intersect(set1.member_set(), set2.member_set());
  set1.remove_useless_nodes(intersection);

  for (uint i = 0; i < set1.size(); ++i) {
    ObjID obj = static_cast<ObjID>(set1.at(i));
    ObjectState* os1 = _state.get_object_state(obj);
    ObjectState* os2 = newin.get_object_state(obj);
    if (os1->is_virtual() && os2->is_virtual()) {
      os1->merge(os2, kit, region, pnum);
    } else {
      assert(os1 != nullptr && os2 != nullptr, "sanity check");
      Node* m;
      Node* n;
      bool materialized;
      EscapedState* es;

      if (os1->is_virtual()) {
        // If obj is virtual in current state, it must be escaped in newin.
        // Mark it escaped in current state.
        EscapedState* es2 = static_cast<EscapedState*>(os2);
        materialized = es2->has_materialized();
        m = _state.get_java_oop(obj);
        n = es2->merged_value();
        es = _state.escape(obj, m, materialized);
      } else if (os2->is_virtual()) {
        // If obj is virtual in newin, it must be escaped in current state.
        // Mark it escaped in newin
        es = static_cast<EscapedState*>(os1);
        materialized = es->has_materialized();
        m = es->merged_value();
        n = newin.get_java_oop(obj);
        os2 = newin.escape(obj, n, false);
      } else {
        // obj is escaped in both newin and current state.
        es = static_cast<EscapedState*>(os1);
        EscapedState* es2 = static_cast<EscapedState*>(os2);
        m = es->merged_value();
        n = es2->merged_value();
        materialized = es->has_materialized() || es2->has_materialized();
      }

      // Select the per-path oop: extend an existing phi owned by this region,
      // or create a fresh one when the two paths disagree.
      if (m->is_Phi() && m->in(0) == region) {
        ensure_phi(m->as_Phi(), pnum);
        // only update the pnum if we have never seen it before.
        if (m->in(pnum) == nullptr) {
          m->set_req(pnum, n);
        }
      } else if (m != n) {
        const Type* type = obj->oop_type(kit->gvn());
        Node* phi = PhiNode::make(region, m, type);
        phi->set_req(pnum, n);
        kit->gvn().set_type(phi, type);
        es->update(materialized, phi);
      }
    }
  }

  // process individual phi
  SafePointNode* map = kit->map();
  for (uint i = 0; i < map->req(); ++i) {
    Node* node = map->in(i);

    if (node != nullptr && node->is_Phi() && node->as_Phi()->region() == region) {
      process_phi(node->as_Phi(), kit, region, pnum);
    }
  }

#ifdef ASSERT
  _state.validate();
#endif
}
975
976 // Passive Materialization
977 // ------------------------
978 // Materialize an object at the phi node because at least one of its predecessors has materialized the object.
// Since C2 PEA does not eliminate the original allocation, we skip passive materialization and keep using it.
// The only problem is partial redundancy. JDK-8287061 should address this issue.
981 //
// PEA splits an object based on its escapement. At the merging point, the original object is NonEscape, or it has already
// been materialized before. The phi is a 'reducible Object-Phi' in JDK-8287061 and the original object is scalar replaceable!
984 //
985 // obj' = PHI(Region, OriginalObj, ClonedObj)
// and OriginalObj is NonEscape but NSR; ClonedObj is Global/ArgEscape
987 //
988 // JDK-8287061 transforms it to =>
989 // obj' = PHI(Region, null, ClonedObj)
990 // selector = PHI(Region, 0, 1)
991 //
992 // since OriginalObj is NonEscape, it is replaced by scalars.
993 //
// Passive-materialization hook: would clone 'var' at the merge point because
// at least one predecessor has already materialized the object. Currently a
// deliberate no-op that returns 'var' unchanged -- see the discussion above.
static Node* ensure_object_materialized(Node* var, PEAState& state, SafePointNode* from_map, RegionNode* r, int pnum) {
  // Skip passive materialization for the time being.
  // If JDK-8287061 can guarantee to replace the original allocation, we don't
  // need to worry about partial redundancy.
  return var;
}
999
1000 // Merge phi node incrementally.
1001 // we check all merged inputs in _state.
1002 // 1. all inputs refer to the same ObjID, then phi is created as alias of ObjID
1003 // 2. otherwise, any input is alias with a 'virtual' object needs to convert to 'Escaped'. replace input with merged_value.
1004 // 3. otherwise, if any input is aliased with an Escaped object. replace input with merged value.
// Merge phi node incrementally (see the numbered cases in the comment above):
// either register the phi as an alias of the single object all inputs refer
// to, or force any virtual inputs to Escaped and rewrite the phi's inputs
// with their merged values.
void AllocationStateMerger::process_phi(PhiNode* phi, GraphKit* kit, RegionNode* region, int pnum) {
  ObjID unique = nullptr;
  bool materialized = false;
  bool same_obj = true;
  PartialEscapeAnalysis* pea = kit->PEA();

  if (pea == nullptr) return;

  // First pass: determine whether every live input aliases the same object,
  // and whether any of them has already been materialized.
  for (uint i = 1; i < phi->req(); ++i) {
    if (region->in(i) == nullptr || region->in(i)->is_top())
      continue;

    Node* node = phi->in(i);
    ObjID obj = pea->is_alias(node);
    if (obj != nullptr) {
      if (unique == nullptr) {
        unique = obj;
      } else if (unique != obj) {
        same_obj = false;
      }
      EscapedState* es = _state.as_escaped(pea, node);
      if (es != nullptr) {
        materialized |= es->has_materialized();
      }
    } else {
      same_obj = false;
    }
  }

  if (same_obj) {
    //xliu: should I also check pnum == 1?
    // phi nodes for an exception handler may leave normal paths vacant.
    pea->add_alias(unique, phi);
  } else {
    bool printed = false;

    // Second pass: inputs alias different objects. Escape any virtual input
    // and substitute each input with its merged (escaped) value.
    for (uint i = 1; i < phi->req(); ++i) {
      if (region->in(i) == nullptr || region->in(i)->is_top())
        continue;

      Node* node = phi->in(i);
      ObjID obj = pea->is_alias(node);
      if (obj != nullptr && _state.contains(obj)) {
        ObjectState* os = _state.get_object_state(obj);
        if (os->is_virtual()) {
          Node* n = ensure_object_materialized(node, _state, kit->map(), region, pnum);
          os = _state.escape(obj, n, materialized);
        }
        EscapedState* es = static_cast<EscapedState*>(os);
        Node* value = es->merged_value();
        // If the merged value is itself a phi on this region, take its
        // corresponding per-path input instead.
        if (value->is_Phi() && value->in(0) == region) {
          value = value->in(i);
        }

        if (node != value) {
          assert(value != phi, "sanity");
#ifndef PRODUCT
          if (PEAVerbose) {
            if (!printed) {
              phi->dump();
              printed = true;
            }
            tty->print_cr("[PEA] replace %dth input with node %d", i, value->_idx);
          }
#endif
          phi->replace_edge(node, value);
        }
      }
    }
    // The phi no longer refers to a single object; drop any stale alias.
    ObjID obj = pea->is_alias(phi);
    if (obj != nullptr) {
      pea->remove_alias(obj, phi);
    }
  }
}
1080
// Reconcile object states when Parse creates 'phi' merging current value 'm'
// with incoming value 'n'. If both refer to the same tracked object and
// either side is already Escaped, record the phi as the object's merged oop.
void AllocationStateMerger::merge_at_phi_creation(const PartialEscapeAnalysis* pea, PEAState& newin, PhiNode* phi, Node* m, Node* n) {
  ObjID obj1 = pea->is_alias(m);
  ObjID obj2 = pea->is_alias(n);

  if (_state.contains(obj1)) { // m points to an object that _state is tracking.
    ObjectState* os1 = _state.get_object_state(obj1);
    ObjectState* os2 = newin.contains(obj2) ? newin.get_object_state(obj2) : nullptr;

    // obj1 != obj2 if n points to something else. It could be the other object, null or a ConP.
    // we do nothing here because PEA doesn't create phi in this case.
    if (obj1 == obj2 && os2 != nullptr) { // n points to the same object and the predecessor state is tracking it.
      if (!os1->is_virtual() || !os2->is_virtual()) {
        if (os2->is_virtual()) {
          // passive materialize: the object escaped on the current path, so
          // the incoming side must be marked escaped too.
          os2 = newin.escape(obj2, n, false);
        }

        if (os1->is_virtual()) {
          bool materialized = static_cast<EscapedState*>(os2)->has_materialized();
          _state.escape(obj1, phi, materialized);
        } else {
          // Both escaped: the phi becomes the merged oop.
          static_cast<EscapedState*>(os1)->update(phi);
        }
      }
    }
  }
}
1108
// Nothing to release: _state references a PEAState owned by the caller.
AllocationStateMerger::~AllocationStateMerger() {
}
1111
1112 bool PEAContext::match(ciMethod* method) const {
1113 if (_matcher != nullptr && method != nullptr) {
1114 VM_ENTRY_MARK;
1115 methodHandle mh(THREAD, method->get_Method());
1116 return _matcher->match(mh);
1117 }
1118 return true;
1119 }
1120
1121 EscapedState* PEAState::escape(ObjID id, Node* p, bool materialized) {
1122 assert(p != nullptr, "the new alias must be non-null");
1123 Node* old = nullptr;
1124 EscapedState* es;
1125
1126 if (contains(id)) {
1127 ObjectState* os = get_object_state(id);
1128 // if os is EscapedState and its materialized_value is not-null,
1129 if (!os->is_virtual()) {
1130 materialized |= static_cast<EscapedState*>(os)->has_materialized();
1131 }
1132 es = new EscapedState(materialized, p);
1133 es->ref_cnt(os->ref_cnt()); // copy the refcnt from the original ObjectState.
1134 } else {
1135 es = new EscapedState(materialized, p);
1136 }
1137 _state.put(id, es);
1138 if (materialized) {
1139 static_cast<AllocateNode*>(id)->inc_materialized();
1140 }
1141 assert(contains(id), "sanity check");
1142 return es;
1143 }
|