246 assert(phase->C->get_alias_index(t) == phase->C->get_alias_index(t_adr), "correct memory chain");
247 }
248 }
249 return result;
250 }
251
252 static Node *step_through_mergemem(PhaseGVN *phase, MergeMemNode *mmem, const TypePtr *tp, const TypePtr *adr_check, outputStream *st) {
253 uint alias_idx = phase->C->get_alias_index(tp);
254 Node *mem = mmem;
255 #ifdef ASSERT
256 {
257 // Check that current type is consistent with the alias index used during graph construction
258 assert(alias_idx >= Compile::AliasIdxRaw, "must not be a bad alias_idx");
259 bool consistent = adr_check == nullptr || adr_check->empty() ||
260 phase->C->must_alias(adr_check, alias_idx );
261 // Sometimes dead array references collapse to a[-1], a[-2], or a[-3]
262 if( !consistent && adr_check != nullptr && !adr_check->empty() &&
263 tp->isa_aryptr() && tp->offset() == Type::OffsetBot &&
264 adr_check->isa_aryptr() && adr_check->offset() != Type::OffsetBot &&
265 ( adr_check->offset() == arrayOopDesc::length_offset_in_bytes() ||
266 adr_check->offset() == Type::klass_offset() ||
267 adr_check->offset() == oopDesc::mark_offset_in_bytes() ) ) {
268 // don't assert if it is dead code.
269 consistent = true;
270 }
271 if( !consistent ) {
272 st->print("alias_idx==%d, adr_check==", alias_idx);
273 if( adr_check == nullptr ) {
274 st->print("null");
275 } else {
276 adr_check->dump();
277 }
278 st->cr();
279 print_alias_types();
280 assert(consistent, "adr_check must match alias idx");
281 }
282 }
283 #endif
284 // TypeOopPtr::NOTNULL+any is an OOP with unknown offset - generally
285 // means an array I have not precisely typed yet. Do not do any
286 // alias stuff with it any time soon.
927
928 for (size_t i = 0; i < sizeof offsets / sizeof offsets[0]; i++) {
929 if (offset == offsets[i]) {
930 return true;
931 }
932 }
933 }
934
935 return false;
936 }
937 #endif
938
939 //----------------------------LoadNode::make-----------------------------------
940 // Polymorphic factory method:
941 Node* LoadNode::make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, const Type* rt, BasicType bt, MemOrd mo,
942 ControlDependency control_dependency, bool require_atomic_access, bool unaligned, bool mismatched, bool unsafe, uint8_t barrier_data) {
943 Compile* C = gvn.C;
944 
945 // sanity check the alias category against the created node type
946 assert(!(adr_type->isa_oopptr() &&
947 adr_type->offset() == Type::klass_offset()),
948 "use LoadKlassNode instead");
949 assert(!(adr_type->isa_aryptr() &&
950 adr_type->offset() == arrayOopDesc::length_offset_in_bytes()),
951 "use LoadRangeNode instead");
952 // Check control edge of raw loads
953 assert( ctl != nullptr || C->get_alias_index(adr_type) != Compile::AliasIdxRaw ||
954 // oop will be recorded in oop map if load crosses safepoint
955 rt->isa_oopptr() || is_immutable_value(adr),
956 "raw memory operations should have control edge");
957 LoadNode* load = nullptr;
958 switch (bt) {
959 case T_BOOLEAN: load = new LoadUBNode(ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
960 case T_BYTE: load = new LoadBNode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
961 case T_INT: load = new LoadINode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
962 case T_CHAR: load = new LoadUSNode(ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
963 case T_SHORT: load = new LoadSNode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
964 case T_LONG: load = new LoadLNode (ctl, mem, adr, adr_type, rt->is_long(), mo, control_dependency, require_atomic_access); break;
965 case T_FLOAT: load = new LoadFNode (ctl, mem, adr, adr_type, rt, mo, control_dependency); break;
966 case T_DOUBLE: load = new LoadDNode (ctl, mem, adr, adr_type, rt, mo, control_dependency, require_atomic_access); break;
967 case T_ADDRESS: load = new LoadPNode (ctl, mem, adr, adr_type, rt->is_ptr(), mo, control_dependency); break;
2455 // constant oop => constant klass
2456 if (offset == java_lang_Class::array_klass_offset()) {
2457 if (t->is_void()) {
2458 // We cannot create a void array. Since void is a primitive type return null
2459 // klass. Users of this result need to do a null check on the returned klass.
2460 return TypePtr::NULL_PTR;
2461 }
2462 return TypeKlassPtr::make(ciArrayKlass::make(t), Type::trust_interfaces);
2463 }
2464 if (!t->is_klass()) {
2465 // a primitive Class (e.g., int.class) has null for a klass field
2466 return TypePtr::NULL_PTR;
2467 }
2468 // (Folds up the 1st indirection in aClassConstant.getModifiers().)
2469 return TypeKlassPtr::make(t->as_klass(), Type::trust_interfaces);
2470 }
2471 // non-constant mirror, so we can't tell what's going on
2472 }
2473 if (!tinst->is_loaded())
2474 return _type; // Bail out if not loaded
2475 if (offset == Type::klass_offset()) {
2476 return tinst->as_klass_type(true);
2477 }
2478 }
2479 
2480 // Check for loading klass from an array
2481 const TypeAryPtr *tary = tp->isa_aryptr();
2482 if (tary != nullptr &&
2483 tary->offset() == Type::klass_offset()) {
2484 return tary->as_klass_type(true);
2485 }
2486 
2487 // Check for loading klass from an array klass
2488 const TypeKlassPtr *tkls = tp->isa_klassptr();
2489 if (tkls != nullptr && !StressReflectiveCode) {
2490 if (!tkls->is_loaded())
2491 return _type; // Bail out if not loaded
2492 if (tkls->isa_aryklassptr() && tkls->is_aryklassptr()->elem()->isa_klassptr() &&
2493 tkls->offset() == in_bytes(ObjArrayKlass::element_klass_offset())) {
2494 // // Always returning precise element type is incorrect,
2495 // // e.g., element type could be object and array may contain strings
2496 // return TypeKlassPtr::make(TypePtr::Constant, elem, 0);
2497 
2498 // The array's TypeKlassPtr was declared 'precise' or 'not precise'
2499 // according to the element type's subclassing.
2500 return tkls->is_aryklassptr()->elem()->isa_klassptr()->cast_to_exactness(tkls->klass_is_exact());
2501 }
2502 if (tkls->isa_instklassptr() != nullptr && tkls->klass_is_exact() &&
2503 tkls->offset() == in_bytes(Klass::super_offset())) {
2529 Node* x = LoadNode::Identity(phase);
2530 if (x != this) return x;
2531 
2532 // Take apart the address into an oop and offset.
2533 // Return 'this' if we cannot.
2534 Node* adr = in(MemNode::Address);
2535 intptr_t offset = 0;
2536 Node* base = AddPNode::Ideal_base_and_offset(adr, phase, offset);
2537 if (base == nullptr) return this;
2538 const TypeOopPtr* toop = phase->type(adr)->isa_oopptr();
2539 if (toop == nullptr) return this;
2540 
2541 // Step over potential GC barrier for OopHandle resolve
2542 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
2543 if (bs->is_gc_barrier_node(base)) {
2544 base = bs->step_over_gc_barrier(base);
2545 }
2546 
2547 // We can fetch the klass directly through an AllocateNode.
2548 // This works even if the klass is not constant (clone or newArray).
2549 if (offset == Type::klass_offset()) {
2550 Node* allocated_klass = AllocateNode::Ideal_klass(base, phase);
2551 if (allocated_klass != nullptr) {
2552 return allocated_klass;
2553 }
2554 }
2555 
2556 // Simplify k.java_mirror.as_klass to plain k, where k is a Klass*.
2557 // See inline_native_Class_query for occurrences of these patterns.
2558 // Java Example: x.getClass().isAssignableFrom(y)
2559 //
2560 // This improves reflective code, often making the Class
2561 // mirror go completely dead. (Current exception: Class
2562 // mirrors may appear in debug info, but we could clean them out by
2563 // introducing a new debug info operator for Klass.java_mirror).
2564 
2565 if (toop->isa_instptr() && toop->is_instptr()->instance_klass() == phase->C->env()->Class_klass()
2566 && offset == java_lang_Class::klass_offset()) {
2567 if (base->is_Load()) {
2568 Node* base2 = base->in(MemNode::Address);
2569 if (base2->is_Load()) { /* direct load of a load which is the OopHandle */
|
246 assert(phase->C->get_alias_index(t) == phase->C->get_alias_index(t_adr), "correct memory chain");
247 }
248 }
249 return result;
250 }
251
252 static Node *step_through_mergemem(PhaseGVN *phase, MergeMemNode *mmem, const TypePtr *tp, const TypePtr *adr_check, outputStream *st) {
253 uint alias_idx = phase->C->get_alias_index(tp);
254 Node *mem = mmem;
255 #ifdef ASSERT
256 {
257 // Check that current type is consistent with the alias index used during graph construction
258 assert(alias_idx >= Compile::AliasIdxRaw, "must not be a bad alias_idx");
259 bool consistent = adr_check == nullptr || adr_check->empty() ||
260 phase->C->must_alias(adr_check, alias_idx );
261 // Sometimes dead array references collapse to a[-1], a[-2], or a[-3]
262 if( !consistent && adr_check != nullptr && !adr_check->empty() &&
263 tp->isa_aryptr() && tp->offset() == Type::OffsetBot &&
264 adr_check->isa_aryptr() && adr_check->offset() != Type::OffsetBot &&
265 ( adr_check->offset() == arrayOopDesc::length_offset_in_bytes() ||
266 adr_check->offset() == Type::klass_offset() ||
267 adr_check->offset() == oopDesc::mark_offset_in_bytes() ) ) {
268 // don't assert if it is dead code.
269 consistent = true;
270 }
271 if( !consistent ) {
272 st->print("alias_idx==%d, adr_check==", alias_idx);
273 if( adr_check == nullptr ) {
274 st->print("null");
275 } else {
276 adr_check->dump();
277 }
278 st->cr();
279 print_alias_types();
280 assert(consistent, "adr_check must match alias idx");
281 }
282 }
283 #endif
284 // TypeOopPtr::NOTNULL+any is an OOP with unknown offset - generally
285 // means an array I have not precisely typed yet. Do not do any
286 // alias stuff with it any time soon.
927
928 for (size_t i = 0; i < sizeof offsets / sizeof offsets[0]; i++) {
929 if (offset == offsets[i]) {
930 return true;
931 }
932 }
933 }
934
935 return false;
936 }
937 #endif
938
939 //----------------------------LoadNode::make-----------------------------------
940 // Polymorphic factory method:
941 Node* LoadNode::make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, const Type* rt, BasicType bt, MemOrd mo,
942 ControlDependency control_dependency, bool require_atomic_access, bool unaligned, bool mismatched, bool unsafe, uint8_t barrier_data) {
943 Compile* C = gvn.C;
944
945 // sanity check the alias category against the created node type
946 assert(!(adr_type->isa_oopptr() &&
947 adr_type->offset() == Type::klass_offset()),
948 "use LoadKlassNode instead");
949 assert(!(adr_type->isa_aryptr() &&
950 adr_type->offset() == arrayOopDesc::length_offset_in_bytes()),
951 "use LoadRangeNode instead");
952 // Check control edge of raw loads
953 assert( ctl != nullptr || C->get_alias_index(adr_type) != Compile::AliasIdxRaw ||
954 // oop will be recorded in oop map if load crosses safepoint
955 rt->isa_oopptr() || is_immutable_value(adr),
956 "raw memory operations should have control edge");
957 LoadNode* load = nullptr;
958 switch (bt) {
959 case T_BOOLEAN: load = new LoadUBNode(ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
960 case T_BYTE: load = new LoadBNode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
961 case T_INT: load = new LoadINode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
962 case T_CHAR: load = new LoadUSNode(ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
963 case T_SHORT: load = new LoadSNode (ctl, mem, adr, adr_type, rt->is_int(), mo, control_dependency); break;
964 case T_LONG: load = new LoadLNode (ctl, mem, adr, adr_type, rt->is_long(), mo, control_dependency, require_atomic_access); break;
965 case T_FLOAT: load = new LoadFNode (ctl, mem, adr, adr_type, rt, mo, control_dependency); break;
966 case T_DOUBLE: load = new LoadDNode (ctl, mem, adr, adr_type, rt, mo, control_dependency, require_atomic_access); break;
967 case T_ADDRESS: load = new LoadPNode (ctl, mem, adr, adr_type, rt->is_ptr(), mo, control_dependency); break;
2455 // constant oop => constant klass
2456 if (offset == java_lang_Class::array_klass_offset()) {
2457 if (t->is_void()) {
2458 // We cannot create a void array. Since void is a primitive type return null
2459 // klass. Users of this result need to do a null check on the returned klass.
2460 return TypePtr::NULL_PTR;
2461 }
2462 return TypeKlassPtr::make(ciArrayKlass::make(t), Type::trust_interfaces);
2463 }
2464 if (!t->is_klass()) {
2465 // a primitive Class (e.g., int.class) has null for a klass field
2466 return TypePtr::NULL_PTR;
2467 }
2468 // (Folds up the 1st indirection in aClassConstant.getModifiers().)
2469 return TypeKlassPtr::make(t->as_klass(), Type::trust_interfaces);
2470 }
2471 // non-constant mirror, so we can't tell what's going on
2472 }
2473 if (!tinst->is_loaded())
2474 return _type; // Bail out if not loaded
2475 if (offset == Type::klass_offset()) {
2476 return tinst->as_klass_type(true);
2477 }
2478 }
2479
2480 // Check for loading klass from an array
2481 const TypeAryPtr *tary = tp->isa_aryptr();
2482 if (tary != nullptr &&
2483 tary->offset() == Type::klass_offset()) {
2484 return tary->as_klass_type(true);
2485 }
2486
2487 // Check for loading klass from an array klass
2488 const TypeKlassPtr *tkls = tp->isa_klassptr();
2489 if (tkls != nullptr && !StressReflectiveCode) {
2490 if (!tkls->is_loaded())
2491 return _type; // Bail out if not loaded
2492 if (tkls->isa_aryklassptr() && tkls->is_aryklassptr()->elem()->isa_klassptr() &&
2493 tkls->offset() == in_bytes(ObjArrayKlass::element_klass_offset())) {
2494 // // Always returning precise element type is incorrect,
2495 // // e.g., element type could be object and array may contain strings
2496 // return TypeKlassPtr::make(TypePtr::Constant, elem, 0);
2497
2498 // The array's TypeKlassPtr was declared 'precise' or 'not precise'
2499 // according to the element type's subclassing.
2500 return tkls->is_aryklassptr()->elem()->isa_klassptr()->cast_to_exactness(tkls->klass_is_exact());
2501 }
2502 if (tkls->isa_instklassptr() != nullptr && tkls->klass_is_exact() &&
2503 tkls->offset() == in_bytes(Klass::super_offset())) {
2529 Node* x = LoadNode::Identity(phase);
2530 if (x != this) return x;
2531
2532 // Take apart the address into an oop and offset.
2533 // Return 'this' if we cannot.
2534 Node* adr = in(MemNode::Address);
2535 intptr_t offset = 0;
2536 Node* base = AddPNode::Ideal_base_and_offset(adr, phase, offset);
2537 if (base == nullptr) return this;
2538 const TypeOopPtr* toop = phase->type(adr)->isa_oopptr();
2539 if (toop == nullptr) return this;
2540
2541 // Step over potential GC barrier for OopHandle resolve
2542 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
2543 if (bs->is_gc_barrier_node(base)) {
2544 base = bs->step_over_gc_barrier(base);
2545 }
2546
2547 // We can fetch the klass directly through an AllocateNode.
2548 // This works even if the klass is not constant (clone or newArray).
2549 if (offset == Type::klass_offset()) {
2550 Node* allocated_klass = AllocateNode::Ideal_klass(base, phase);
2551 if (allocated_klass != nullptr) {
2552 return allocated_klass;
2553 }
2554 }
2555
2556 // Simplify k.java_mirror.as_klass to plain k, where k is a Klass*.
2557 // See inline_native_Class_query for occurrences of these patterns.
2558 // Java Example: x.getClass().isAssignableFrom(y)
2559 //
2560 // This improves reflective code, often making the Class
2561 // mirror go completely dead. (Current exception: Class
2562 // mirrors may appear in debug info, but we could clean them out by
2563 // introducing a new debug info operator for Klass.java_mirror).
2564
2565 if (toop->isa_instptr() && toop->is_instptr()->instance_klass() == phase->C->env()->Class_klass()
2566 && offset == java_lang_Class::klass_offset()) {
2567 if (base->is_Load()) {
2568 Node* base2 = base->in(MemNode::Address);
2569 if (base2->is_Load()) { /* direct load of a load which is the OopHandle */
|