    // Try to get a better type than POS for the size
    ary_type = ary_type->is_aryptr()->cast_to_size(length_type);
  }

  Node* javaoop = set_output_for_allocation(alloc, ary_type, deoptimize_on_exception);

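  // Tighten the ideal type of the array length; with the last argument true,
  // the improved length node also replaces the original one in the current
  // JVM state map.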
  array_ideal_length(alloc, ary_type, true);
  return javaoop;
}

// The following "Ideal_foo" functions are placed here because they recognize
// the graph shapes created by the functions immediately above.

//---------------------------Ideal_allocation----------------------------------
// Given an oop pointer or raw pointer, see if it feeds from an AllocateNode.
AllocateNode* AllocateNode::Ideal_allocation(Node* ptr) {
  if (ptr == nullptr) {     // reduce dumb test in callers
    return nullptr;
  }

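  // Step over any GC barrier nodes (e.g. load barriers inserted by the
  // collector's BarrierSetC2) so we see the underlying allocation result.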
  BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
  ptr = bs->step_over_gc_barrier(ptr);

  if (ptr->is_CheckCastPP()) { // strip only one raw-to-oop cast
    ptr = ptr->in(1);
    if (ptr == nullptr) return nullptr;
  }
  // Return null for allocations with several casts:
  //   j.l.reflect.Array.newInstance(jobject, jint)
  //   Object.clone()
  // to keep the more precise type from the last cast.
  if (ptr->is_Proj()) {
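    // A projection's in(0) is the node it projects from; the oop result of an
    // allocation is such a projection hanging directly off the AllocateNode.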
    Node* allo = ptr->in(0);
    if (allo != nullptr && allo->is_Allocate()) {
      return allo->as_Allocate();
    }
  }
  // Report failure to match.
  return nullptr;
}

// Fancy version which also strips off an offset (and reports it to caller).
AllocateNode* AllocateNode::Ideal_allocation(Node* ptr, PhaseValues* phase,