src/hotspot/share/opto/callnode.cpp

 261   return TypeFunc::Parms <= idx  &&  idx <= TypeFunc::Parms+1;
 262 }
 263 
 264 //=============================================================================
 265 JVMState::JVMState(ciMethod* method, JVMState* caller) :
 266   _method(method) {
 267   assert(method != nullptr, "must be valid call site");
 268   _bci = InvocationEntryBci;
 269   _reexecute = Reexecute_Undefined;
 270   debug_only(_bci = -99);  // random garbage value
 271   debug_only(_map = (SafePointNode*)-1);
 272   _caller = caller;
 273   _depth  = 1 + (caller == nullptr ? 0 : caller->depth());
 274   _locoff = TypeFunc::Parms;
 275   _stkoff = _locoff + _method->max_locals();
 276   _monoff = _stkoff + _method->max_stack();
 277   _scloff = _monoff;
 278   _endoff = _monoff;
 279   _sp = 0;
 280 }

 281 JVMState::JVMState(int stack_size) :
 282   _method(nullptr) {
 283   _bci = InvocationEntryBci;
 284   _reexecute = Reexecute_Undefined;
 285   debug_only(_map = (SafePointNode*)-1);
 286   _caller = nullptr;
 287   _depth  = 1;
 288   _locoff = TypeFunc::Parms;
 289   _stkoff = _locoff;
 290   _monoff = _stkoff + stack_size;
 291   _scloff = _monoff;
 292   _endoff = _monoff;
 293   _sp = 0;
 294 }
 295 
 296 //--------------------------------of_depth-------------------------------------
 297 JVMState* JVMState::of_depth(int d) const {
 298   const JVMState* jvmp = this;
 299   assert(0 < d && (uint)d <= depth(), "oob");
 300   for (int skip = depth() - d; skip > 0; skip--) {

 597 
 598 // Extra way to dump a jvms from the debugger,
 599 // to avoid a bug with C++ member function calls.
 600 void dump_jvms(JVMState* jvms) {
 601   jvms->dump();
 602 }
 603 #endif
 604 
 605 //--------------------------clone_shallow--------------------------------------
 606 JVMState* JVMState::clone_shallow(Compile* C) const {
 607   JVMState* n = has_method() ? new (C) JVMState(_method, _caller) : new (C) JVMState(0);
 608   n->set_bci(_bci);
 609   n->_reexecute = _reexecute;
 610   n->set_locoff(_locoff);
 611   n->set_stkoff(_stkoff);
 612   n->set_monoff(_monoff);
 613   n->set_scloff(_scloff);
 614   n->set_endoff(_endoff);
 615   n->set_sp(_sp);
 616   n->set_map(_map);

 617   return n;
 618 }
 619 
 620 //---------------------------clone_deep----------------------------------------
 621 JVMState* JVMState::clone_deep(Compile* C) const {
 622   JVMState* n = clone_shallow(C);
 623   for (JVMState* p = n; p->_caller != nullptr; p = p->_caller) {
 624     p->_caller = p->_caller->clone_shallow(C);
 625   }
 626   assert(n->depth() == depth(), "sanity");
 627   assert(n->debug_depth() == debug_depth(), "sanity");
 628   return n;
 629 }
 630 
 631 /**
 632  * Reset map for all callers
 633  */
 634 void JVMState::set_map_deep(SafePointNode* map) {
 635   for (JVMState* p = this; p != nullptr; p = p->_caller) {
 636     p->set_map(map);

1559     return (SafePointScalarMergeNode*)cached;
1560   }
1561   new_node = true;
1562   SafePointScalarMergeNode* res = (SafePointScalarMergeNode*)Node::clone();
1563   sosn_map->Insert((void*)this, (void*)res);
1564   return res;
1565 }
1566 
1567 #ifndef PRODUCT
1568 void SafePointScalarMergeNode::dump_spec(outputStream *st) const {
1569   st->print(" # merge_pointer_idx=%d, scalarized_objects=%d", _merge_pointer_idx, req()-1);
1570 }
1571 #endif
1572 
1573 //=============================================================================
1574 uint AllocateNode::size_of() const { return sizeof(*this); }
1575 
1576 AllocateNode::AllocateNode(Compile* C, const TypeFunc *atype,
1577                            Node *ctrl, Node *mem, Node *abio,
1578                            Node *size, Node *klass_node, Node *initial_test)
1579   : CallNode(atype, nullptr, TypeRawPtr::BOTTOM)
1580 {
1581   init_class_id(Class_Allocate);
1582   init_flags(Flag_is_macro);
1583   _is_scalar_replaceable = false;
1584   _is_non_escaping = false;
1585   _is_allocation_MemBar_redundant = false;
1586   Node *topnode = C->top();
1587 
1588   init_req( TypeFunc::Control  , ctrl );
1589   init_req( TypeFunc::I_O      , abio );
1590   init_req( TypeFunc::Memory   , mem );
1591   init_req( TypeFunc::ReturnAdr, topnode );
1592   init_req( TypeFunc::FramePtr , topnode );
1593   init_req( AllocSize          , size);
1594   init_req( KlassNode          , klass_node);
1595   init_req( InitialTest        , initial_test);
1596   init_req( ALength            , topnode);
1597   init_req( ValidLengthTest    , topnode);
1598   C->add_macro_node(this);
1599 }

1604          initializer->is_initializer() &&
1605          !initializer->is_static(),
1606              "unexpected initializer method");
1607   BCEscapeAnalyzer* analyzer = initializer->get_bcea();
1608   if (analyzer == nullptr) {
1609     return;
1610   }
1611 
1612   // Allocation node is first parameter in its initializer
1613   if (analyzer->is_arg_stack(0) || analyzer->is_arg_local(0)) {
1614     _is_allocation_MemBar_redundant = true;
1615   }
1616 }
1617 Node *AllocateNode::make_ideal_mark(PhaseGVN *phase, Node* obj, Node* control, Node* mem) {
1618   Node* mark_node = nullptr;
1619   // For now only enable fast locking for non-array types
1620   mark_node = phase->MakeConX(markWord::prototype().value());
1621   return mark_node;
1622 }
1623 
1624 // Retrieve the length from the AllocateArrayNode. Narrow the type with a
1625 // CastII, if appropriate.  If we are not allowed to create new nodes, and
1626 // a CastII is appropriate, return null.
1627 Node *AllocateArrayNode::make_ideal_length(const TypeOopPtr* oop_type, PhaseValues* phase, bool allow_new_nodes) {
1628   Node *length = in(AllocateNode::ALength);
1629   assert(length != nullptr, "length is not null");
1630 
1631   const TypeInt* length_type = phase->find_int_type(length);
1632   const TypeAryPtr* ary_type = oop_type->isa_aryptr();
1633 
1634   if (ary_type != nullptr && length_type != nullptr) {
1635     const TypeInt* narrow_length_type = ary_type->narrow_size_type(length_type);
1636     if (narrow_length_type != length_type) {
1637       // Assert one of:
1638       //   - the narrow_length is 0
1639       //   - the narrow_length is not wider than length
1640       assert(narrow_length_type == TypeInt::ZERO ||
1641              (length_type->is_con() && narrow_length_type->is_con() &&
1642               (narrow_length_type->_hi <= length_type->_lo)) ||
1643              (narrow_length_type->_hi <= length_type->_hi &&

 261   return TypeFunc::Parms <= idx  &&  idx <= TypeFunc::Parms+1;
 262 }
 263 
 264 //=============================================================================
 265 JVMState::JVMState(ciMethod* method, JVMState* caller) :
 266   _method(method) {
 267   assert(method != nullptr, "must be valid call site");
 268   _bci = InvocationEntryBci;
 269   _reexecute = Reexecute_Undefined;
 270   debug_only(_bci = -99);  // random garbage value
 271   debug_only(_map = (SafePointNode*)-1);
 272   _caller = caller;
 273   _depth  = 1 + (caller == nullptr ? 0 : caller->depth());
 274   _locoff = TypeFunc::Parms;
 275   _stkoff = _locoff + _method->max_locals();
 276   _monoff = _stkoff + _method->max_stack();
 277   _scloff = _monoff;
 278   _endoff = _monoff;
 279   _sp = 0;
 280 }
 281 
 282 JVMState::JVMState(int stack_size) :
 283   _method(nullptr) {
 284   _bci = InvocationEntryBci;
 285   _reexecute = Reexecute_Undefined;
 286   debug_only(_map = (SafePointNode*)-1);
 287   _caller = nullptr;
 288   _depth  = 1;
 289   _locoff = TypeFunc::Parms;
 290   _stkoff = _locoff;
 291   _monoff = _stkoff + stack_size;
 292   _scloff = _monoff;
 293   _endoff = _monoff;
 294   _sp = 0;
 295 }
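For orientation: the offset fields assigned in the two constructors above partition a SafePointNode's debug-info inputs into locals [_locoff, _stkoff), expression stack [_stkoff, _monoff), monitors [_monoff, _scloff), and scalar-replaced objects [_scloff, _endoff). A minimal standalone sketch of the arithmetic in the first constructor (not part of callnode.cpp; the value used for TypeFunc::Parms and the method sizes are assumptions for illustration):

    // Standalone sketch, not HotSpot code: how the method-bearing constructor
    // above lays out its offsets for a hypothetical method with 3 locals and
    // a max expression stack of 2.
    #include <cstdio>

    int main() {
      const int parms      = 5;            // stand-in for TypeFunc::Parms (assumed value)
      const int max_locals = 3;            // hypothetical method->max_locals()
      const int max_stack  = 2;            // hypothetical method->max_stack()
      int locoff = parms;                  // first local
      int stkoff = locoff + max_locals;    // first expression-stack slot
      int monoff = stkoff + max_stack;     // first monitor slot
      int scloff = monoff;                 // first scalar-replaced object (none yet)
      int endoff = monoff;                 // one past the last debug-info input
      printf("loc=%d stk=%d mon=%d scl=%d end=%d\n",
             locoff, stkoff, monoff, scloff, endoff);
      return 0;
    }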
 296 
 297 //--------------------------------of_depth-------------------------------------
 298 JVMState* JVMState::of_depth(int d) const {
 299   const JVMState* jvmp = this;
 300   assert(0 < d && (uint)d <= depth(), "oob");
 301   for (int skip = depth() - d; skip > 0; skip--) {

 598 
 599 // Extra way to dump a jvms from the debugger,
 600 // to avoid a bug with C++ member function calls.
 601 void dump_jvms(JVMState* jvms) {
 602   jvms->dump();
 603 }
 604 #endif
 605 
 606 //--------------------------clone_shallow--------------------------------------
 607 JVMState* JVMState::clone_shallow(Compile* C) const {
 608   JVMState* n = has_method() ? new (C) JVMState(_method, _caller) : new (C) JVMState(0);
 609   n->set_bci(_bci);
 610   n->_reexecute = _reexecute;
 611   n->set_locoff(_locoff);
 612   n->set_stkoff(_stkoff);
 613   n->set_monoff(_monoff);
 614   n->set_scloff(_scloff);
 615   n->set_endoff(_endoff);
 616   n->set_sp(_sp);
 617   n->set_map(_map);
 618   n->_alloc_state = _alloc_state;
 619   return n;
 620 }
 621 
 622 //---------------------------clone_deep----------------------------------------
 623 JVMState* JVMState::clone_deep(Compile* C) const {
 624   JVMState* n = clone_shallow(C);
 625   for (JVMState* p = n; p->_caller != nullptr; p = p->_caller) {
 626     p->_caller = p->_caller->clone_shallow(C);
 627   }
 628   assert(n->depth() == depth(), "sanity");
 629   assert(n->debug_depth() == debug_depth(), "sanity");
 630   return n;
 631 }
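Regarding the pair of clone routines above: clone_shallow copies a single JVMState frame and leaves _caller pointing at the shared original, while clone_deep re-clones every frame up the caller chain so the whole chain becomes private to the copy. A minimal sketch of the same walk over a simplified frame type (illustrative only; Frame stands in for JVMState and omits the Compile arena allocation):

    // Illustrative sketch, not HotSpot code.
    struct Frame {
      int    bci    = 0;
      Frame* caller = nullptr;

      Frame* clone_shallow() const {       // copy one frame; caller stays shared
        return new Frame(*this);
      }
      Frame* clone_deep() const {          // privatize the entire caller chain
        Frame* n = clone_shallow();
        for (Frame* p = n; p->caller != nullptr; p = p->caller) {
          p->caller = p->caller->clone_shallow();
        }
        return n;
      }
    };

    int main() {
      Frame outer;
      Frame inner;
      inner.caller = &outer;
      Frame* copy = inner.clone_deep();    // copy->caller is a private clone, not &outer
      delete copy->caller;
      delete copy;
      return 0;
    }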
 632 
 633 /**
 634  * Reset map for all callers
 635  */
 636 void JVMState::set_map_deep(SafePointNode* map) {
 637   for (JVMState* p = this; p != nullptr; p = p->_caller) {
 638     p->set_map(map);

1561     return (SafePointScalarMergeNode*)cached;
1562   }
1563   new_node = true;
1564   SafePointScalarMergeNode* res = (SafePointScalarMergeNode*)Node::clone();
1565   sosn_map->Insert((void*)this, (void*)res);
1566   return res;
1567 }
1568 
1569 #ifndef PRODUCT
1570 void SafePointScalarMergeNode::dump_spec(outputStream *st) const {
1571   st->print(" # merge_pointer_idx=%d, scalarized_objects=%d", _merge_pointer_idx, req()-1);
1572 }
1573 #endif
1574 
1575 //=============================================================================
1576 uint AllocateNode::size_of() const { return sizeof(*this); }
1577 
1578 AllocateNode::AllocateNode(Compile* C, const TypeFunc *atype,
1579                            Node *ctrl, Node *mem, Node *abio,
1580                            Node *size, Node *klass_node, Node *initial_test)
1581   : CallNode(atype, nullptr, TypeRawPtr::BOTTOM), _materialized(0)
1582 {
1583   init_class_id(Class_Allocate);
1584   init_flags(Flag_is_macro);
1585   _is_scalar_replaceable = false;
1586   _is_non_escaping = false;
1587   _is_allocation_MemBar_redundant = false;
1588   Node *topnode = C->top();
1589 
1590   init_req( TypeFunc::Control  , ctrl );
1591   init_req( TypeFunc::I_O      , abio );
1592   init_req( TypeFunc::Memory   , mem );
1593   init_req( TypeFunc::ReturnAdr, topnode );
1594   init_req( TypeFunc::FramePtr , topnode );
1595   init_req( AllocSize          , size);
1596   init_req( KlassNode          , klass_node);
1597   init_req( InitialTest        , initial_test);
1598   init_req( ALength            , topnode);
1599   init_req( ValidLengthTest    , topnode);
1600   C->add_macro_node(this);
1601 }
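For reference, the init_req calls above first fill the fixed CallNode inputs (control, I/O, memory, return address, frame pointer) and then the allocation-specific inputs starting at TypeFunc::Parms. A standalone sketch of the ordering those calls imply (illustrative only; the numeric value chosen for TypeFunc::Parms is an assumption, and only the relative order is taken from the constructor shown):

    // Standalone sketch, not HotSpot code: allocation-specific input slots in
    // the order the constructor above wires them up.
    #include <cstdio>

    enum AllocInput {
      AllocSize = 5,    // stand-in for TypeFunc::Parms (assumed value)
      KlassNode,        // klass of the object being allocated
      InitialTest,      // fast-path/slow-path test
      ALength,          // array length (top for instance allocations)
      ValidLengthTest   // array length range check (top for instance allocations)
    };

    int main() {
      printf("AllocSize=%d KlassNode=%d InitialTest=%d ALength=%d ValidLengthTest=%d\n",
             AllocSize, KlassNode, InitialTest, ALength, ValidLengthTest);
      return 0;
    }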

1606          initializer->is_initializer() &&
1607          !initializer->is_static(),
1608              "unexpected initializer method");
1609   BCEscapeAnalyzer* analyzer = initializer->get_bcea();
1610   if (analyzer == nullptr) {
1611     return;
1612   }
1613 
1614   // Allocation node is first parameter in its initializer
1615   if (analyzer->is_arg_stack(0) || analyzer->is_arg_local(0)) {
1616     _is_allocation_MemBar_redundant = true;
1617   }
1618 }
1619 Node *AllocateNode::make_ideal_mark(PhaseGVN *phase, Node* obj, Node* control, Node* mem) {
1620   Node* mark_node = nullptr;
1621   // For now only enable fast locking for non-array types
1622   mark_node = phase->MakeConX(markWord::prototype().value());
1623   return mark_node;
1624 }
1625 
1626 // This is a precise notnull oop of the klass.
1627 // (Actually, it need not be precise if this is a reflective allocation.)
1628 // It's what we cast the result to.
1629 const TypeOopPtr* AllocateNode::oop_type(const PhaseValues& phase) const {
1630   Node* klass_node = in(KlassNode);
1631   const TypeKlassPtr* tklass = phase.type(klass_node)->isa_klassptr();
1632   if (!tklass) tklass = TypeInstKlassPtr::OBJECT;
1633   return tklass->as_instance_type();
1634 }
1635 
1636 // Retrieve the length from the AllocateArrayNode. Narrow the type with a
1637 // CastII, if appropriate.  If we are not allowed to create new nodes, and
1638 // a CastII is appropriate, return null.
1639 Node *AllocateArrayNode::make_ideal_length(const TypeOopPtr* oop_type, PhaseValues* phase, bool allow_new_nodes) {
1640   Node *length = in(AllocateNode::ALength);
1641   assert(length != nullptr, "length is not null");
1642 
1643   const TypeInt* length_type = phase->find_int_type(length);
1644   const TypeAryPtr* ary_type = oop_type->isa_aryptr();
1645 
1646   if (ary_type != nullptr && length_type != nullptr) {
1647     const TypeInt* narrow_length_type = ary_type->narrow_size_type(length_type);
1648     if (narrow_length_type != length_type) {
1649       // Assert one of:
1650       //   - the narrow_length is 0
1651       //   - the narrow_length is not wider than length
1652       assert(narrow_length_type == TypeInt::ZERO ||
1653              (length_type->is_con() && narrow_length_type->is_con() &&
1654               (narrow_length_type->_hi <= length_type->_lo)) ||
1655              (narrow_length_type->_hi <= length_type->_hi &&