
src/hotspot/share/opto/memnode.cpp


uint MemBarNode::hash() const { return NO_HASH; }
bool MemBarNode::cmp( const Node &n ) const {
  return (&n == this);          // Always fail except on self
}

//------------------------------make-------------------------------------------
MemBarNode* MemBarNode::make(Compile* C, int opcode, int atp, Node* pn) {
  switch (opcode) {
  case Op_MemBarAcquire:     return new MemBarAcquireNode(C, atp, pn);
  case Op_LoadFence:         return new LoadFenceNode(C, atp, pn);
  case Op_MemBarRelease:     return new MemBarReleaseNode(C, atp, pn);
  case Op_StoreFence:        return new StoreFenceNode(C, atp, pn);
  case Op_MemBarStoreStore:  return new MemBarStoreStoreNode(C, atp, pn);
  case Op_StoreStoreFence:   return new StoreStoreFenceNode(C, atp, pn);
  case Op_MemBarAcquireLock: return new MemBarAcquireLockNode(C, atp, pn);
  case Op_MemBarReleaseLock: return new MemBarReleaseLockNode(C, atp, pn);
  case Op_MemBarVolatile:    return new MemBarVolatileNode(C, atp, pn);
  case Op_MemBarCPUOrder:    return new MemBarCPUOrderNode(C, atp, pn);
  case Op_OnSpinWait:        return new OnSpinWaitNode(C, atp, pn);
  case Op_Initialize:        return new InitializeNode(C, atp, pn);
  case Op_Blackhole:         return new BlackholeNode(C, atp, pn);
  default: ShouldNotReachHere(); return NULL;
  }
}
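For context, MemBarNode::make is the factory through which the rest of C2 creates barrier nodes of a given opcode. The sketch below is modeled on GraphKit::insert_mem_bar and is illustrative only; the exact edge wiring is an assumption, not taken from this patch.

// Illustrative sketch (assumed shape, not part of this patch): build a membar of the
// requested kind, wire its control and memory inputs, then make its projections the
// current control and memory state.
Node* GraphKit::insert_mem_bar(int opcode, Node* precedent) {
  MemBarNode* mb = MemBarNode::make(C, opcode, Compile::AliasIdxBot, precedent);
  mb->init_req(TypeFunc::Control, control());
  mb->init_req(TypeFunc::Memory,  reset_memory());
  Node* membar = _gvn.transform(mb);
  set_control(_gvn.transform(new ProjNode(membar, TypeFunc::Control)));
  set_all_memory_call(membar);
  return membar;
}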
void MemBarNode::remove(PhaseIterGVN *igvn) {
  if (outcnt() != 2) {
    assert(Opcode() == Op_Initialize, "Only seen when there are no use of init memory");
    assert(outcnt() == 1, "Only control then");
  }
  if (trailing_store() || trailing_load_store()) {
    MemBarNode* leading = leading_membar();
    if (leading != NULL) {
      assert(leading->trailing_membar() == this, "inconsistent leading/trailing membars");
      leading->remove(igvn);
    }
  }
  if (proj_out_or_null(TypeFunc::Memory) != NULL) {
    igvn->replace_node(proj_out(TypeFunc::Memory), in(TypeFunc::Memory));
  }
  if (proj_out_or_null(TypeFunc::Control) != NULL) {
[... unchanged lines omitted ...]
      } else {
        Node* in = n->in(0);
        if (in != NULL && !in->is_top()) {
          wq.push(in);
        }
      }
    }
  }
  assert(found == 1 || (found == 0 && leading == NULL), "consistency check failed");
#endif
  if (leading == NULL) {
    return NULL;
  }
  MemBarNode* mb = leading->as_MemBar();
  assert((mb->_kind == LeadingStore && _kind == TrailingStore) ||
         (mb->_kind == LeadingLoadStore && _kind == TrailingLoadStore), "bad leading membar");
  assert(mb->_pair_idx == _pair_idx, "bad leading membar");
  return mb;
}
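The two asserts above encode the invariant that leading and trailing membars are created in pairs and must agree on their kind and pair index. A minimal illustrative check of that symmetry, using only the accessors already seen in this file (a hypothetical helper, not part of memnode.cpp):

// Hypothetical consistency check (illustration only): a paired leading/trailing
// membar must be able to find each other again through the pairing accessors.
static void verify_membar_pair(MemBarNode* leading, MemBarNode* trailing) {
  assert(leading->trailing_membar() == trailing, "leading must reach its trailing membar");
  assert(trailing->leading_membar() == leading, "trailing must reach its leading membar");
}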

#ifndef PRODUCT
void BlackholeNode::format(PhaseRegAlloc* ra, outputStream* st) const {
  st->print("blackhole ");
  bool first = true;
  for (uint i = 0; i < req(); i++) {
    Node* n = in(i);
    if (n != NULL && OptoReg::is_valid(ra->get_reg_first(n))) {
      if (first) {
        first = false;
      } else {
        st->print(", ");
      }
      char buf[128];
      ra->dump_register(n, buf);
      st->print("%s", buf);
    }
  }
  st->cr();
}
#endif
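For context on where the inputs printed by BlackholeNode::format come from: the blackhole intrinsic creates an Op_Blackhole membar through the factory above and attaches every call argument as an extra input, so the register allocator keeps those values alive. The sketch below is modeled on LibraryCallKit::inline_blackhole; its exact shape is assumed, not taken from this patch.

// Illustrative sketch (assumed shape, not part of this patch): create the blackhole
// barrier and bind the intrinsic's arguments as its inputs so they stay live.
bool LibraryCallKit::inline_blackhole() {
  Node* bh = insert_mem_bar(Op_Blackhole);
  uint nargs = callee()->arg_size();
  for (uint i = 0; i < nargs; i++) {
    bh->add_req(argument(i));   // each bound input later shows up in format()
  }
  return true;
}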

//===========================InitializeNode====================================
// SUMMARY:
// This node acts as a memory barrier on raw memory, after some raw stores.
// The 'cooked' oop value feeds from the Initialize, not the Allocation.
// The Initialize can 'capture' suitably constrained stores as raw inits.
// It can coalesce related raw stores into larger units (called 'tiles').
// It can avoid zeroing new storage for memory units which have raw inits.
// At macro-expansion, it is marked 'complete', and does not optimize further.
//
// EXAMPLE:
// The object 'new short[2]' occupies 16 bytes in a 32-bit machine.
//   ctl = incoming control; mem* = incoming memory
// (Note:  A star * on a memory edge denotes I/O and other standard edges.)
// First allocate uninitialized memory and fill in the header:
//   alloc = (Allocate ctl mem* 16 #short[].klass ...)
//   ctl := alloc.Control; mem* := alloc.Memory*
//   rawmem = alloc.Memory; rawoop = alloc.RawAddress
// Then initialize to zero the non-header parts of the raw memory block:
//   init = (Initialize alloc.Control alloc.Memory* alloc.RawAddress)