src/hotspot/share/opto/memnode.hpp
// product builds and retain the debug info for debug builds.
const TypePtr *raw_adr_type() const {
return DEBUG_ONLY(_adr_type) NOT_DEBUG(nullptr);
}
+ #ifdef ASSERT
+ void set_adr_type(const TypePtr* adr_type) { _adr_type = adr_type; }
+ #endif
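+ // Usage sketch (names hypothetical): a caller that rewires the address input
+ // in a debug build can keep the cached debug-only type in sync, e.g.
+ //   DEBUG_ONLY(mem->set_adr_type(new_adr_type);)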
+
// Return the barrier data of n, if available, or 0 otherwise.
static uint8_t barrier_data(const Node* n);
// Map a load or store opcode to its corresponding store opcode.
// (Return -1 if unknown.)
virtual const Type* Value(PhaseGVN* phase) const;
virtual Node* Identity(PhaseGVN* phase);
virtual bool depends_only_on_test() const { return true; }
};
-
//------------------------------StoreNode--------------------------------------
// Store value; requires Store, Address and Value
class StoreNode : public MemNode {
private:
// On platforms with weak memory ordering (e.g., PPC, Ia64) we distinguish
//------------------------------ClearArray-------------------------------------
class ClearArrayNode: public Node {
private:
bool _is_large;
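+ // True when the init value is a long that is either not a constant or a
+ // non-zero constant (derived from 'val' in the constructor below); such a
+ // clear is expected to write the region in whole words rather than rely on
+ // plain byte-wise zeroing.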
+ bool _word_copy_only;
public:
- ClearArrayNode( Node *ctrl, Node *arymem, Node *word_cnt, Node *base, bool is_large)
- : Node(ctrl,arymem,word_cnt,base), _is_large(is_large) {
+ ClearArrayNode( Node *ctrl, Node *arymem, Node *word_cnt, Node *base, Node* val, bool is_large)
+ : Node(ctrl, arymem, word_cnt, base, val), _is_large(is_large),
+ _word_copy_only(val->bottom_type()->isa_long() && (!val->bottom_type()->is_long()->is_con() || val->bottom_type()->is_long()->get_con() != 0)) {
init_class_id(Class_ClearArray);
}
virtual int Opcode() const;
virtual const Type *bottom_type() const { return Type::MEMORY; }
// ClearArray modifies array elements, and so affects only the
virtual const class TypePtr *adr_type() const;
virtual Node* Identity(PhaseGVN* phase);
virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
virtual uint match_edge(uint idx) const;
bool is_large() const { return _is_large; }
+ bool word_copy_only() const { return _word_copy_only; }
// Clear the given area of an object or array.
// The start offset must always be aligned mod BytesPerInt.
// The end offset must always be aligned mod BytesPerLong.
// Return the new memory.
static Node* clear_memory(Node* control, Node* mem, Node* dest,
+ Node* val,
+ Node* raw_val,
intptr_t start_offset,
intptr_t end_offset,
PhaseGVN* phase);
static Node* clear_memory(Node* control, Node* mem, Node* dest,
+ Node* val,
+ Node* raw_val,
intptr_t start_offset,
Node* end_offset,
PhaseGVN* phase);
static Node* clear_memory(Node* control, Node* mem, Node* dest,
+ Node* raw_val,
Node* start_offset,
Node* end_offset,
PhaseGVN* phase);
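+ // Example (sketch, variable names illustrative): clearing the body of a new
+ // array from the end of its header up to a variable end offset, using the
+ // overload that takes a constant start offset and a Node end offset:
+ //   mem = ClearArrayNode::clear_memory(ctl, mem, dest, val, raw_val,
+ //                                      header_size_in_bytes, end_offset, phase);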
// Return allocation input memory edge if it is different instance
// or itself if it is the one we are looking for.
virtual const class TypePtr *adr_type() const { return _adr_type; }
virtual const Type* Value(PhaseGVN* phase) const;
virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
virtual uint match_edge(uint idx) const { return 0; }
virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
- virtual Node *match( const ProjNode *proj, const Matcher *m );
+ virtual Node *match(const ProjNode *proj, const Matcher *m, const RegMask* mask);
// Factory method. Builds a wide or narrow membar.
// Optional 'precedent' becomes an extra edge if not null.
static MemBarNode* make(Compile* C, int opcode,
int alias_idx = Compile::AliasIdxBot,
Node* precedent = nullptr);
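// Example (sketch): building a release barrier on raw memory with a preceding
// store as its optional precedent edge:
//   MemBarNode* mb = MemBarNode::make(C, Op_MemBarRelease, Compile::AliasIdxRaw, store);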