src/share/vm/opto/library_call.cpp

(old version)

  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/systemDictionary.hpp"
  27 #include "classfile/vmSymbols.hpp"
  28 #include "compiler/compileBroker.hpp"
  29 #include "compiler/compileLog.hpp"

  30 #include "oops/objArrayKlass.hpp"
  31 #include "opto/addnode.hpp"
  32 #include "opto/callGenerator.hpp"
  33 #include "opto/cfgnode.hpp"
  34 #include "opto/connode.hpp"
  35 #include "opto/idealKit.hpp"
  36 #include "opto/mathexactnode.hpp"
  37 #include "opto/mulnode.hpp"
  38 #include "opto/parse.hpp"
  39 #include "opto/runtime.hpp"
  40 #include "opto/subnode.hpp"
  41 #include "prims/nativeLookup.hpp"
  42 #include "runtime/sharedRuntime.hpp"
  43 #include "trace/traceMacros.hpp"
  44 
  45 class LibraryIntrinsic : public InlineCallGenerator {
  46   // Extend the set of intrinsics known to the runtime:
  47  public:
  48  private:
  49   bool             _is_virtual;
  50   bool             _does_virtual_dispatch;
  51   int8_t           _predicates_count;  // Intrinsic is predicated by several conditions
  52   int8_t           _last_predicate; // Last generated predicate
  53   vmIntrinsics::ID _intrinsic_id;
  54 
  55  public:
  56   LibraryIntrinsic(ciMethod* m, bool is_virtual, int predicates_count, bool does_virtual_dispatch, vmIntrinsics::ID id)
  57     : InlineCallGenerator(m),
  58       _is_virtual(is_virtual),
  59       _does_virtual_dispatch(does_virtual_dispatch),
  60       _predicates_count((int8_t)predicates_count),
  61       _last_predicate((int8_t)-1),
  62       _intrinsic_id(id)
  63   {


 219   bool inline_math_subtractExactI(bool is_decrement);
 220   bool inline_math_subtractExactL(bool is_decrement);
 221   bool inline_exp();
 222   bool inline_pow();
 223   Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 224   bool inline_min_max(vmIntrinsics::ID id);
 225   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 226   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 227   int classify_unsafe_addr(Node* &base, Node* &offset);
 228   Node* make_unsafe_address(Node* base, Node* offset);
 229   // Helper for inline_unsafe_access.
 230   // Generates the guards that check whether the result of
 231   // Unsafe.getObject should be recorded in an SATB log buffer.
 232   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 233   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
 234   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 235   static bool klass_needs_init_guard(Node* kls);
 236   bool inline_unsafe_allocate();
 237   bool inline_unsafe_copyMemory();
 238   bool inline_native_currentThread();
 239 #ifdef TRACE_HAVE_INTRINSICS
 240   bool inline_native_classID();
 241   bool inline_native_threadID();
 242 #endif
 243   bool inline_native_time_funcs(address method, const char* funcName);
 244   bool inline_native_isInterrupted();
 245   bool inline_native_Class_query(vmIntrinsics::ID id);
 246   bool inline_native_subtype_check();
 247 
 248   bool inline_native_newArray();
 249   bool inline_native_getLength();
 250   bool inline_array_copyOf(bool is_copyOfRange);
 251   bool inline_array_equals();
 252   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 253   bool inline_native_clone(bool is_virtual);
 254   bool inline_native_Reflection_getCallerClass();
 255   // Helper function for inlining native object hash method
 256   bool inline_native_hashcode(bool is_virtual, bool is_static);
 257   bool inline_native_getClass();
 258 
 259   // Helper functions for inlining arraycopy
 260   bool inline_arraycopy();
 261   void generate_arraycopy(const TypePtr* adr_type,


 862   case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
 863   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 864 
 865   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 866   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 867   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 868 
 869   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 870   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 871   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 872   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 873   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 874 
 875   case vmIntrinsics::_loadFence:
 876   case vmIntrinsics::_storeFence:
 877   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 878 
 879   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 880   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 881 
 882 #ifdef TRACE_HAVE_INTRINSICS
 883   case vmIntrinsics::_classID:                  return inline_native_classID();
 884   case vmIntrinsics::_threadID:                 return inline_native_threadID();
 885   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
 886 #endif
 887   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 888   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 889   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 890   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 891   case vmIntrinsics::_newArray:                 return inline_native_newArray();
 892   case vmIntrinsics::_getLength:                return inline_native_getLength();
 893   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 894   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 895   case vmIntrinsics::_equalsC:                  return inline_array_equals();
 896   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 897 
 898   case vmIntrinsics::_isAssignableFrom:         return inline_native_subtype_check();
 899 
 900   case vmIntrinsics::_isInstance:
 901   case vmIntrinsics::_getModifiers:
 902   case vmIntrinsics::_isInterface:
 903   case vmIntrinsics::_isArray:
 904   case vmIntrinsics::_isPrimitive:
 905   case vmIntrinsics::_getSuperclass:


3226 
3227   Node* test = NULL;
3228   if (LibraryCallKit::klass_needs_init_guard(kls)) {
3229     // Note:  The argument might still be an illegal value like
3230     // Serializable.class or Object[].class.   The runtime will handle it.
3231     // But we must make an explicit check for initialization.
3232     Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
3233     // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
3234     // can generate code to load it as unsigned byte.
3235     Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
3236     Node* bits = intcon(InstanceKlass::fully_initialized);
3237     test = _gvn.transform(new (C) SubINode(inst, bits));
3238     // The 'test' is non-zero if we need to take a slow path.
3239   }
3240 
3241   Node* obj = new_instance(kls, test);
3242   set_result(obj);
3243   return true;
3244 }
3245 
3246 #ifdef TRACE_HAVE_INTRINSICS
3247 /*
3248  * oop -> myklass
3249  * myklass->trace_id |= USED
3250  * return myklass->trace_id & ~0x3
3251  */
3252 bool LibraryCallKit::inline_native_classID() {
3253   null_check_receiver();  // null-check, then ignore
3254   Node* cls = null_check(argument(1), T_OBJECT);
3255   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
3256   kls = null_check(kls, T_OBJECT);
3257   ByteSize offset = TRACE_ID_OFFSET;

3258   Node* insp = basic_plus_adr(kls, in_bytes(offset));
3259   Node* tvalue = make_load(NULL, insp, TypeLong::LONG, T_LONG, MemNode::unordered);
3260   Node* bits = longcon(~0x03l); // ignore bit 0 & 1
3261   Node* andl = _gvn.transform(new (C) AndLNode(tvalue, bits));
3262   Node* clsused = longcon(0x01l); // set the class bit
3263   Node* orl = _gvn.transform(new (C) OrLNode(tvalue, clsused));
3264 
3265   const TypePtr *adr_type = _gvn.type(insp)->isa_ptr();
3266   store_to_memory(control(), insp, orl, T_LONG, adr_type, MemNode::unordered);
3267   set_result(andl);
3268   return true;
3269 }
3270 
3271 bool LibraryCallKit::inline_native_threadID() {
3272   Node* tls_ptr = NULL;
3273   Node* cur_thr = generate_current_thread(tls_ptr);
3274   Node* p = basic_plus_adr(top()/*!oop*/, tls_ptr, in_bytes(JavaThread::osthread_offset()));
3275   Node* osthread = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
3276   p = basic_plus_adr(top()/*!oop*/, osthread, in_bytes(OSThread::thread_id_offset()));
3277 
3278   Node* threadid = NULL;
3279   size_t thread_id_size = OSThread::thread_id_size();
3280   if (thread_id_size == (size_t) BytesPerLong) {
3281     threadid = ConvL2I(make_load(control(), p, TypeLong::LONG, T_LONG, MemNode::unordered));
3282   } else if (thread_id_size == (size_t) BytesPerInt) {
3283     threadid = make_load(control(), p, TypeInt::INT, T_INT, MemNode::unordered);
3284   } else {
3285     ShouldNotReachHere();
3286   }
3287   set_result(threadid);
3288   return true;
3289 }
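The function above is two raw-pointer loads: JavaThread::_osthread, then the OSThread's thread id, whose width differs by platform (BytesPerLong vs. BytesPerInt). A minimal standalone sketch of that logic, using hypothetical mock structs rather than the real JavaThread/OSThread classes:

    #include <cstdint>

    // Hypothetical stand-ins for JavaThread/OSThread; layout is illustrative only.
    struct MockOSThread   { intptr_t _thread_id; };   // 4 or 8 bytes depending on platform
    struct MockJavaThread { MockOSThread* _osthread; };

    // Model of the loads above: JavaThread -> OSThread -> thread id,
    // truncating a 64-bit id to int the way the ConvL2I node would.
    static int current_thread_id(const MockJavaThread* thr) {
      const MockOSThread* os = thr->_osthread;          // JavaThread::osthread_offset()
      if (sizeof(os->_thread_id) == sizeof(int64_t)) {  // OSThread::thread_id_size() == BytesPerLong
        return (int)(int64_t)os->_thread_id;            // ConvL2I
      } else {                                          // OSThread::thread_id_size() == BytesPerInt
        return (int)os->_thread_id;
      }
    }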
3290 #endif
3291 
3292 //------------------------inline_native_time_funcs--------------
3293 // inline code for System.currentTimeMillis() and System.nanoTime()
3294 // these have the same type and signature
3295 bool LibraryCallKit::inline_native_time_funcs(address funcAddr, const char* funcName) {
3296   const TypeFunc* tf = OptoRuntime::void_long_Type();
3297   const TypePtr* no_memory_effects = NULL;
3298   Node* time = make_runtime_call(RC_LEAF, tf, funcAddr, funcName, no_memory_effects);
3299   Node* value = _gvn.transform(new (C) ProjNode(time, TypeFunc::Parms+0));
3300 #ifdef ASSERT
3301   Node* value_top = _gvn.transform(new (C) ProjNode(time, TypeFunc::Parms+1));
3302   assert(value_top == top(), "second value must be top");
3303 #endif
3304   set_result(value);
3305   return true;
3306 }
3307 
3308 //------------------------inline_native_currentThread------------------
3309 bool LibraryCallKit::inline_native_currentThread() {
3310   Node* junk = NULL;

(new version)

  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/systemDictionary.hpp"
  27 #include "classfile/vmSymbols.hpp"
  28 #include "compiler/compileBroker.hpp"
  29 #include "compiler/compileLog.hpp"
  30 #include "jfr/support/jfrIntrinsics.hpp"
  31 #include "oops/objArrayKlass.hpp"
  32 #include "opto/addnode.hpp"
  33 #include "opto/callGenerator.hpp"
  34 #include "opto/cfgnode.hpp"
  35 #include "opto/connode.hpp"
  36 #include "opto/idealKit.hpp"
  37 #include "opto/mathexactnode.hpp"
  38 #include "opto/mulnode.hpp"
  39 #include "opto/parse.hpp"
  40 #include "opto/runtime.hpp"
  41 #include "opto/subnode.hpp"
  42 #include "prims/nativeLookup.hpp"
  43 #include "runtime/sharedRuntime.hpp"
  44 #include "utilities/macros.hpp"
  45 
  46 class LibraryIntrinsic : public InlineCallGenerator {
  47   // Extend the set of intrinsics known to the runtime:
  48  public:
  49  private:
  50   bool             _is_virtual;
  51   bool             _does_virtual_dispatch;
  52   int8_t           _predicates_count;  // Intrinsic is predicated by several conditions
  53   int8_t           _last_predicate; // Last generated predicate
  54   vmIntrinsics::ID _intrinsic_id;
  55 
  56  public:
  57   LibraryIntrinsic(ciMethod* m, bool is_virtual, int predicates_count, bool does_virtual_dispatch, vmIntrinsics::ID id)
  58     : InlineCallGenerator(m),
  59       _is_virtual(is_virtual),
  60       _does_virtual_dispatch(does_virtual_dispatch),
  61       _predicates_count((int8_t)predicates_count),
  62       _last_predicate((int8_t)-1),
  63       _intrinsic_id(id)
  64   {


 220   bool inline_math_subtractExactI(bool is_decrement);
 221   bool inline_math_subtractExactL(bool is_decrement);
 222   bool inline_exp();
 223   bool inline_pow();
 224   Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 225   bool inline_min_max(vmIntrinsics::ID id);
 226   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 227   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 228   int classify_unsafe_addr(Node* &base, Node* &offset);
 229   Node* make_unsafe_address(Node* base, Node* offset);
 230   // Helper for inline_unsafe_access.
 231   // Generates the guards that check whether the result of
 232   // Unsafe.getObject should be recorded in an SATB log buffer.
 233   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 234   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
 235   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 236   static bool klass_needs_init_guard(Node* kls);
 237   bool inline_unsafe_allocate();
 238   bool inline_unsafe_copyMemory();
 239   bool inline_native_currentThread();
 240 #ifdef JFR_HAVE_INTRINSICS
 241   bool inline_native_classID();
 242   bool inline_native_getEventWriter();
 243 #endif
 244   bool inline_native_time_funcs(address method, const char* funcName);
 245   bool inline_native_isInterrupted();
 246   bool inline_native_Class_query(vmIntrinsics::ID id);
 247   bool inline_native_subtype_check();
 248 
 249   bool inline_native_newArray();
 250   bool inline_native_getLength();
 251   bool inline_array_copyOf(bool is_copyOfRange);
 252   bool inline_array_equals();
 253   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 254   bool inline_native_clone(bool is_virtual);
 255   bool inline_native_Reflection_getCallerClass();
 256   // Helper function for inlining native object hash method
 257   bool inline_native_hashcode(bool is_virtual, bool is_static);
 258   bool inline_native_getClass();
 259 
 260   // Helper functions for inlining arraycopy
 261   bool inline_arraycopy();
 262   void generate_arraycopy(const TypePtr* adr_type,


 863   case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
 864   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 865 
 866   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 867   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 868   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 869 
 870   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 871   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 872   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 873   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 874   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 875 
 876   case vmIntrinsics::_loadFence:
 877   case vmIntrinsics::_storeFence:
 878   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 879 
 880   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 881   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 882 
 883 #ifdef JFR_HAVE_INTRINSICS
 884   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JFR_TIME_FUNCTION), "counterTime");
 885   case vmIntrinsics::_getClassId:               return inline_native_classID();
 886   case vmIntrinsics::_getEventWriter:           return inline_native_getEventWriter();
 887 #endif
 888   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 889   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 890   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 891   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 892   case vmIntrinsics::_newArray:                 return inline_native_newArray();
 893   case vmIntrinsics::_getLength:                return inline_native_getLength();
 894   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 895   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 896   case vmIntrinsics::_equalsC:                  return inline_array_equals();
 897   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 898 
 899   case vmIntrinsics::_isAssignableFrom:         return inline_native_subtype_check();
 900 
 901   case vmIntrinsics::_isInstance:
 902   case vmIntrinsics::_getModifiers:
 903   case vmIntrinsics::_isInterface:
 904   case vmIntrinsics::_isArray:
 905   case vmIntrinsics::_isPrimitive:
 906   case vmIntrinsics::_getSuperclass:


3227 
3228   Node* test = NULL;
3229   if (LibraryCallKit::klass_needs_init_guard(kls)) {
3230     // Note:  The argument might still be an illegal value like
3231     // Serializable.class or Object[].class.   The runtime will handle it.
3232     // But we must make an explicit check for initialization.
3233     Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
3234     // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
3235     // can generate code to load it as unsigned byte.
3236     Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
3237     Node* bits = intcon(InstanceKlass::fully_initialized);
3238     test = _gvn.transform(new (C) SubINode(inst, bits));
3239     // The 'test' is non-zero if we need to take a slow path.
3240   }
3241 
3242   Node* obj = new_instance(kls, test);
3243   set_result(obj);
3244   return true;
3245 }
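As the comment block notes, Unsafe.allocateInstance can only take the fast allocation path once the klass is fully initialized, so the IR loads InstanceKlass::_init_state as an unsigned byte and subtracts fully_initialized; any non-zero result routes to the runtime slow path. A standalone sketch of that guard, with made-up stand-ins for the klass struct and state enum (not the real InstanceKlass API):

    #include <cstdint>

    // Hypothetical stand-ins for InstanceKlass and its _init_state; illustration only.
    enum MockInitState : uint8_t { allocated, loaded, linked, being_initialized, fully_initialized };
    struct MockKlass { uint8_t _init_state; };

    // Mirrors the generated guard: load the state as an unsigned byte and
    // subtract fully_initialized; a non-zero result means "take the slow path".
    static bool needs_slow_path(const MockKlass* k) {
      int inst = k->_init_state;                  // UBYTE/T_BOOLEAN load in the IR
      int test = inst - (int)fully_initialized;   // SubINode(inst, bits)
      return test != 0;
    }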
3246 
3247 #ifdef JFR_HAVE_INTRINSICS
3248 /*
3249  * oop -> myklass
3250  * myklass->trace_id |= USED
3251  * return myklass->trace_id & ~0x3
3252  */
3253 bool LibraryCallKit::inline_native_classID() {
3254   Node* cls = null_check(argument(0), T_OBJECT);

3255   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
3256   kls = null_check(kls, T_OBJECT);
3257 
3258   ByteSize offset = KLASS_TRACE_ID_OFFSET;
3259   Node* insp = basic_plus_adr(kls, in_bytes(offset));
3260   Node* tvalue = make_load(NULL, insp, TypeLong::LONG, T_LONG, MemNode::unordered);
3261 

3262   Node* clsused = longcon(0x01l); // set the class bit
3263   Node* orl = _gvn.transform(new (C) OrLNode(tvalue, clsused));

3264   const TypePtr *adr_type = _gvn.type(insp)->isa_ptr();
3265   store_to_memory(control(), insp, orl, T_LONG, adr_type, MemNode::unordered);
3266 
3267 #ifdef TRACE_ID_META_BITS
3268   Node* mbits = longcon(~TRACE_ID_META_BITS);
3269   tvalue = _gvn.transform(new (C) AndLNode(tvalue, mbits));
3270 #endif
3271 #ifdef TRACE_ID_SHIFT
3272   Node* cbits = intcon(TRACE_ID_SHIFT);
3273   tvalue = _gvn.transform(new (C) URShiftLNode(tvalue, cbits));
3274 #endif
3275 
3276   set_result(tvalue);
3277   return true;
3278 }
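The pseudo-code comment above boils down to plain integer arithmetic on the stored trace id: set the low "class used" bit, then return the id with its metadata bits masked and, if configured, shifted. A minimal sketch, assuming placeholder values for TRACE_ID_META_BITS and TRACE_ID_SHIFT since the real constants come from the JFR support headers and are not shown here:

    #include <cstdint>

    // Placeholder constants: the real TRACE_ID_META_BITS / TRACE_ID_SHIFT come
    // from the JFR support headers and may differ from these values.
    static const uint64_t kMetaBits = 0x3;  // assumed low metadata bits
    static const int      kShift    = 0;    // assumed shift amount

    // Model of inline_native_classID: mark the klass trace id as used,
    // then return it with the metadata bits stripped.
    static uint64_t class_id(uint64_t* trace_id_slot) {
      uint64_t tvalue = *trace_id_slot;
      *trace_id_slot = tvalue | 0x01;       // OrLNode: set the "class used" bit
      tvalue &= ~kMetaBits;                 // AndLNode (under #ifdef TRACE_ID_META_BITS)
      return tvalue >> kShift;              // URShiftLNode (under #ifdef TRACE_ID_SHIFT)
    }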
3279 
3280 bool LibraryCallKit::inline_native_getEventWriter() {
3281   Node* tls_ptr = _gvn.transform(new (C) ThreadLocalNode());
3282 
3283   Node* jobj_ptr = basic_plus_adr(top(), tls_ptr,
3284                                   in_bytes(THREAD_LOCAL_WRITER_OFFSET_JFR)
3285                                   );
3286 
3287   Node* jobj = make_load(control(), jobj_ptr, TypeRawPtr::BOTTOM, T_ADDRESS, MemNode::unordered);
3288 
3289   Node* jobj_cmp_null = _gvn.transform( new (C) CmpPNode(jobj, null()) );
3290   Node* test_jobj_eq_null  = _gvn.transform( new (C) BoolNode(jobj_cmp_null, BoolTest::eq) );
3291 
3292   IfNode* iff_jobj_null =
3293     create_and_map_if(control(), test_jobj_eq_null, PROB_MIN, COUNT_UNKNOWN);
3294 
3295   enum { _normal_path = 1,
3296          _null_path = 2,
3297          PATH_LIMIT };
3298 
3299   RegionNode* result_rgn = new (C) RegionNode(PATH_LIMIT);
3300   PhiNode*    result_val = new (C) PhiNode(result_rgn, TypePtr::BOTTOM);
3301 
3302   Node* jobj_is_null = _gvn.transform(new (C) IfTrueNode(iff_jobj_null));
3303   result_rgn->init_req(_null_path, jobj_is_null);
3304   result_val->init_req(_null_path, null());
3305 
3306   Node* jobj_is_not_null = _gvn.transform(new (C) IfFalseNode(iff_jobj_null));
3307   result_rgn->init_req(_normal_path, jobj_is_not_null);
3308 
3309   Node* res = make_load(jobj_is_not_null, jobj, TypeInstPtr::NOTNULL, T_OBJECT, MemNode::unordered);
3310   result_val->init_req(_normal_path, res);
3311 
3312   set_result(result_rgn, result_val);
3313 
3314   return true;
3315 }
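The region/phi built above implements a simple null check over a thread-local handle: if the JFR event-writer jobject stored at THREAD_LOCAL_WRITER_OFFSET_JFR is NULL the intrinsic returns NULL, otherwise it loads the writer oop through that handle. A rough standalone model using hypothetical mock types (not the real JFR thread-local layout):

    #include <cstddef>

    // Hypothetical mock of the JFR thread-local event-writer slot; names are illustrative.
    struct MockEventWriter { int unused; };
    struct MockThreadLocal {
      MockEventWriter** _java_event_writer;   // jobject handle, NULL until a writer exists
    };

    // Model of inline_native_getEventWriter: the two paths of the region/phi above.
    static MockEventWriter* get_event_writer(const MockThreadLocal* tls) {
      MockEventWriter** jobj = tls->_java_event_writer;  // load at THREAD_LOCAL_WRITER_OFFSET_JFR
      if (jobj == NULL) {
        return NULL;                                     // _null_path
      }
      return *jobj;                                      // _normal_path: load the oop through the handle
    }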
3316 #endif // JFR_HAVE_INTRINSICS
3317 
3318 //------------------------inline_native_time_funcs--------------
3319 // inline code for System.currentTimeMillis() and System.nanoTime()
3320 // these have the same type and signature
3321 bool LibraryCallKit::inline_native_time_funcs(address funcAddr, const char* funcName) {
3322   const TypeFunc* tf = OptoRuntime::void_long_Type();
3323   const TypePtr* no_memory_effects = NULL;
3324   Node* time = make_runtime_call(RC_LEAF, tf, funcAddr, funcName, no_memory_effects);
3325   Node* value = _gvn.transform(new (C) ProjNode(time, TypeFunc::Parms+0));
3326 #ifdef ASSERT
3327   Node* value_top = _gvn.transform(new (C) ProjNode(time, TypeFunc::Parms+1));
3328   assert(value_top == top(), "second value must be top");
3329 #endif
3330   set_result(value);
3331   return true;
3332 }
3333 
3334 //------------------------inline_native_currentThread------------------
3335 bool LibraryCallKit::inline_native_currentThread() {
3336   Node* junk = NULL;
