< prev index next > src/hotspot/share/opto/callnode.hpp
Print this page
#include "opto/connode.hpp"
#include "opto/mulnode.hpp"
#include "opto/multnode.hpp"
#include "opto/opcodes.hpp"
+ #include "opto/partialEscape.hpp"
#include "opto/phaseX.hpp"
#include "opto/replacednodes.hpp"
#include "opto/type.hpp"
#include "utilities/growableArray.hpp"
uint _sp; // Java Expression Stack Pointer for this state
int _bci; // Byte Code Index of this JVM point
ReexecuteState _reexecute; // Whether this bytecode need to be re-executed
ciMethod* _method; // Method Pointer
SafePointNode* _map; // Map node associated with this scope
+ // Partial-escape-analysis state carried by this JVMState (declared in
+ // opto/partialEscape.hpp, included above). NOTE(review): presumably one
+ // state per scope along the parsing path — confirm against the PEA pass.
+ PEAState _alloc_state;
+
public:
friend class Compile;
friend class PreserveReexecuteState;
// Because JVMState objects live over the entire lifetime of the
JVMState* clone_deep(Compile* C) const; // recursively clones caller chain
JVMState* clone_shallow(Compile* C) const; // retains uncloned caller
void set_map_deep(SafePointNode *map);// reset map for all callers
void adapt_position(int delta); // Adapt offsets in in-array after adding an edge.
int interpreter_frame_size() const;
+ // Accessor for the PEA allocation state; returns a non-const reference so
+ // the PEA pass can update the state in place.
+ PEAState& alloc_state() { return _alloc_state; }
#ifndef PRODUCT
void print_method_with_lineno(outputStream* st, bool show_name) const;
void format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const;
void dump_spec(outputStream *st) const;
// the _is_io_use flag in the projection.) This is needed when expanding the node in
// order to differentiate the uses of the projection on the normal control path from
// those on the exception return path.
//
class AllocateNode : public CallNode {
+ private:
+ int _materialized; // materialization count by PEA (times this virtual allocation was materialized)
+
public:
enum {
// Output:
RawAddress = TypeFunc::Parms, // the newly-allocated raw address
// Inputs:
// If object doesn't escape in <.init> method and there is memory barrier
// inserted at exit of its <.init>, memory barrier for new is not necessary.
// Invoke this method when MemBar at exit of initializer and post-dominate
// allocation node.
void compute_MemBar_redundancy(ciMethod* initializer);
- bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }
+ // Patch adds const: this is a pure query of the cached flag computed by
+ // compute_MemBar_redundancy() above.
+ bool is_allocation_MemBar_redundant() const { return _is_allocation_MemBar_redundant; }
Node* make_ideal_mark(PhaseGVN *phase, Node* obj, Node* control, Node* mem);
+
+ // Declaration only — NOTE(review): presumably returns the oop type of the
+ // allocated object as seen by `phase`; confirm against the definition in
+ // callnode.cpp.
+ const TypeOopPtr* oop_type(const PhaseValues& phase) const;
+
+ // Bump the PEA materialization counter for this allocation.
+ void inc_materialized() {
+ _materialized++;
+ }
+
+ // Number of times PEA has materialized this allocation so far.
+ int materialized_cnt() const {
+ return _materialized;
+ }
};
//------------------------------AllocateArray---------------------------------
//
// High-level array allocation
< prev index next >