< prev index next > src/hotspot/share/opto/library_call.hpp
Print this page
#include "classfile/javaClasses.hpp"
#include "opto/callGenerator.hpp"
#include "opto/graphKit.hpp"
#include "opto/castnode.hpp"
#include "opto/convertnode.hpp"
+ #include "opto/inlinetypenode.hpp"
#include "opto/intrinsicnode.hpp"
#include "opto/movenode.hpp"
class LibraryIntrinsic : public InlineCallGenerator {
// Extend the set of intrinsics known to the runtime:
bool try_to_inline(int predicate);
Node* try_to_predicate(int predicate);
// (Old version.) Transfers the intrinsic's result node, if any, onto the
// caller's expression stack. The stack-slot kind is taken from the result
// node's bottom type. Does nothing when the map is stopped (dead path) or
// when the intrinsic produced no result node.
void push_result() {
// Push the result onto the stack.
! if (!stopped() && result() != nullptr) {
! BasicType bt = result()->bottom_type()->basic_type();
! push_node(bt, result());
}
}
private:
void fatal_unexpected_iid(vmIntrinsics::ID iid) {
bool try_to_inline(int predicate);
Node* try_to_predicate(int predicate);
// (New version.) Transfers the intrinsic's result node, if any, onto the
// caller's expression stack. Does nothing when the map is stopped or there
// is no result. When C2 is inlining incrementally and the result is an
// InlineTypeNode, the value is first buffered via InlineTypeNode::buffer()
// so the caller receives an oop; should_reexecute is set (and restored by
// PreserveReexecuteState) so that a deoptimization triggered by the buffer
// allocation re-executes the original call bytecode.
// NOTE(review): bt is captured from the un-buffered node — presumably its
// basic type is already the oop kind expected by push_node; confirm.
void push_result() {
// Push the result onto the stack.
! Node* res = result();
! if (!stopped() && res != nullptr) {
! BasicType bt = res->bottom_type()->basic_type();
+ if (C->inlining_incrementally() && res->is_InlineType()) {
+ // The caller expects an oop when incrementally inlining an intrinsic that returns an
+ // inline type. Make sure the call is re-executed if the allocation triggers a deoptimization.
+ PreserveReexecuteState preexecs(this);
+ jvms()->set_should_reexecute(true);
+ res = res->as_InlineType()->buffer(this);
+ }
+ push_node(bt, res);
}
}
private:
void fatal_unexpected_iid(vmIntrinsics::ID iid) {
Node* length, bool char_count);
Node* current_thread_helper(Node* &tls_output, ByteSize handle_offset,
bool is_immutable);
Node* generate_current_thread(Node* &tls_output);
Node* generate_virtual_thread(Node* threadObj);
- Node* load_mirror_from_klass(Node* klass);
Node* load_klass_from_mirror_common(Node* mirror, bool never_see_null,
RegionNode* region, int null_path,
int offset);
Node* load_klass_from_mirror(Node* mirror, bool never_see_null,
RegionNode* region, int null_path) {
ByteSize offset, const Type* type, BasicType bt);
Node* generate_misc_flags_guard(Node* kls,
int modifier_mask, int modifier_bits,
RegionNode* region);
Node* generate_interface_guard(Node* kls, RegionNode* region);
Node* generate_hidden_class_guard(Node* kls, RegionNode* region);
// (Old version.) Guard on "kls is an array klass": delegates to
// generate_array_guard_common with obj_array=false, not_array=false.
Node* generate_array_guard(Node* kls, RegionNode* region) {
! return generate_array_guard_common(kls, region, false, false);
}
// (Old version.) Guard on "kls is NOT an array klass": delegates to
// generate_array_guard_common with obj_array=false, not_array=true.
Node* generate_non_array_guard(Node* kls, RegionNode* region) {
! return generate_array_guard_common(kls, region, false, true);
}
// (Old version.) Guard on "kls is an object-array klass": delegates to
// generate_array_guard_common with obj_array=true, not_array=false.
Node* generate_objArray_guard(Node* kls, RegionNode* region) {
! return generate_array_guard_common(kls, region, true, false);
}
// (Old version.) Guard on "kls is NOT an object-array klass": delegates to
// generate_array_guard_common with obj_array=true, not_array=true.
Node* generate_non_objArray_guard(Node* kls, RegionNode* region) {
! return generate_array_guard_common(kls, region, true, true);
}
! Node* generate_array_guard_common(Node* kls, RegionNode* region,
- bool obj_array, bool not_array);
Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
CallJavaNode* generate_method_call(vmIntrinsicID method_id, bool is_virtual, bool is_static, bool res_not_null);
CallJavaNode* generate_method_call_static(vmIntrinsicID method_id, bool res_not_null) {
return generate_method_call(method_id, false, true, res_not_null);
}
ByteSize offset, const Type* type, BasicType bt);
Node* generate_misc_flags_guard(Node* kls,
int modifier_mask, int modifier_bits,
RegionNode* region);
Node* generate_interface_guard(Node* kls, RegionNode* region);
+
// Array-klass property that generate_array_guard_common tests for.
// Replaces the old (obj_array, not_array) boolean-pair encoding with one
// readable enum; TypeArray (primitive arrays) is a newly expressible case.
+ enum ArrayKind {
+ AnyArray,
+ NonArray,
+ ObjectArray,
+ NonObjectArray,
+ TypeArray
+ };
+
Node* generate_hidden_class_guard(Node* kls, RegionNode* region);
+
// (New version.) Guard on "kls is an array klass": delegates to
// generate_array_guard_common with ArrayKind::AnyArray.
Node* generate_array_guard(Node* kls, RegionNode* region) {
! return generate_array_guard_common(kls, region, AnyArray);
}
// (New version.) Guard on "kls is NOT an array klass": delegates to
// generate_array_guard_common with ArrayKind::NonArray.
Node* generate_non_array_guard(Node* kls, RegionNode* region) {
! return generate_array_guard_common(kls, region, NonArray);
}
// (New version.) Guard on "kls is an object-array klass": delegates to
// generate_array_guard_common with ArrayKind::ObjectArray.
Node* generate_objArray_guard(Node* kls, RegionNode* region) {
! return generate_array_guard_common(kls, region, ObjectArray);
}
// (New version.) Guard on "kls is NOT an object-array klass": delegates to
// generate_array_guard_common with ArrayKind::NonObjectArray.
Node* generate_non_objArray_guard(Node* kls, RegionNode* region) {
! return generate_array_guard_common(kls, region, NonObjectArray);
+ }
// (New.) Guard on "kls is a primitive (type) array klass": delegates to
// generate_array_guard_common with ArrayKind::TypeArray. Not expressible
// with the old boolean-pair API.
+ Node* generate_typeArray_guard(Node* kls, RegionNode* region) {
+ return generate_array_guard_common(kls, region, TypeArray);
}
! Node* generate_array_guard_common(Node* kls, RegionNode* region, ArrayKind kind);
Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
CallJavaNode* generate_method_call(vmIntrinsicID method_id, bool is_virtual, bool is_static, bool res_not_null);
CallJavaNode* generate_method_call_static(vmIntrinsicID method_id, bool res_not_null) {
return generate_method_call(method_id, false, true, res_not_null);
}
int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);
typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
! bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
static bool klass_needs_init_guard(Node* kls);
bool inline_unsafe_allocate();
bool inline_unsafe_newArray(bool uninitialized);
bool inline_unsafe_writeback0();
bool inline_unsafe_writebackSync0(bool is_pre);
bool inline_unsafe_copyMemory();
bool inline_unsafe_setMemory();
bool inline_native_currentCarrierThread();
bool inline_native_currentThread();
bool inline_native_setCurrentThread();
int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);
typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
! bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned, bool is_flat = false);
static bool klass_needs_init_guard(Node* kls);
bool inline_unsafe_allocate();
bool inline_unsafe_newArray(bool uninitialized);
+ bool inline_newNullRestrictedArray();
bool inline_unsafe_writeback0();
bool inline_unsafe_writebackSync0(bool is_pre);
bool inline_unsafe_copyMemory();
+ bool inline_unsafe_isFlatArray();
+ bool inline_unsafe_make_private_buffer();
+ bool inline_unsafe_finish_private_buffer();
bool inline_unsafe_setMemory();
bool inline_native_currentCarrierThread();
bool inline_native_currentThread();
bool inline_native_setCurrentThread();
bool inline_native_getEventWriter();
bool inline_native_jvm_commit();
void extend_setCurrentThread(Node* jt, Node* thread);
#endif
bool inline_native_Class_query(vmIntrinsics::ID id);
+ bool inline_primitive_Class_conversion(vmIntrinsics::ID id);
bool inline_native_subtype_check();
bool inline_native_getLength();
bool inline_array_copyOf(bool is_copyOfRange);
bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
bool inline_preconditions_checkIndex(BasicType bt);
< prev index next >