< prev index next >

src/hotspot/cpu/aarch64/c1_Runtime1_aarch64.cpp

Print this page

 633       {
 634         oop_maps = generate_handle_exception(id, sasm);
 635         __ leave();
 636         __ ret(lr);
 637       }
 638       break;
 639 
 640     case throw_div0_exception_id:
 641       { StubFrame f(sasm, "throw_div0_exception", dont_gc_arguments, does_not_return);
 642         oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_div0_exception), false);
 643       }
 644       break;
 645 
 646     case throw_null_pointer_exception_id:
 647       { StubFrame f(sasm, "throw_null_pointer_exception", dont_gc_arguments, does_not_return);
 648         oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_null_pointer_exception), false);
 649       }
 650       break;
 651 
 652     case new_instance_id:

 653     case fast_new_instance_id:
 654     case fast_new_instance_init_check_id:
 655       {
 656         Register klass = r3; // Incoming
 657         Register obj   = r0; // Result
 658 
 659         if (id == new_instance_id) {
 660           __ set_info("new_instance", dont_gc_arguments);


 661         } else if (id == fast_new_instance_id) {
 662           __ set_info("fast new_instance", dont_gc_arguments);
 663         } else {
 664           assert(id == fast_new_instance_init_check_id, "bad StubID");
 665           __ set_info("fast new_instance init check", dont_gc_arguments);
 666         }
 667 
 668         __ enter();
 669         OopMap* map = save_live_registers(sasm);
 670         int call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);





 671         oop_maps = new OopMapSet();
 672         oop_maps->add_gc_map(call_offset, map);
 673         restore_live_registers_except_r0(sasm);
 674         __ verify_oop(obj);
 675         __ leave();
 676         __ ret(lr);
 677 
 678         // r0,: new instance
 679       }
 680 
 681       break;
 682 
 683     case counter_overflow_id:
 684       {
 685         Register bci = r0, method = r1;
 686         __ enter();
 687         OopMap* map = save_live_registers(sasm);
 688         // Retrieve bci
 689         __ ldrw(bci, Address(rfp, 2*BytesPerWord));
 690         // And a pointer to the Method*
 691         __ ldr(method, Address(rfp, 3*BytesPerWord));
 692         int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
 693         oop_maps = new OopMapSet();
 694         oop_maps->add_gc_map(call_offset, map);
 695         restore_live_registers(sasm);
 696         __ leave();
 697         __ ret(lr);
 698       }
 699       break;
 700 
 701     case new_type_array_id:
 702     case new_object_array_id:

 703       {
 704         Register length   = r19; // Incoming
 705         Register klass    = r3; // Incoming
 706         Register obj      = r0; // Result
 707 
 708         if (id == new_type_array_id) {
 709           __ set_info("new_type_array", dont_gc_arguments);
 710         } else {
 711           __ set_info("new_object_array", dont_gc_arguments);


 712         }
 713 
 714 #ifdef ASSERT
 715         // assert object type is really an array of the proper kind
 716         {
 717           Label ok;
 718           Register t0 = obj;
 719           __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
 720           __ asrw(t0, t0, Klass::_lh_array_tag_shift);
 721           int tag = ((id == new_type_array_id)
 722                      ? Klass::_lh_array_tag_type_value
 723                      : Klass::_lh_array_tag_obj_value);
 724           __ mov(rscratch1, tag);
 725           __ cmpw(t0, rscratch1);
 726           __ br(Assembler::EQ, ok);
 727           __ stop("assert(is an array klass)");
















 728           __ should_not_reach_here();
 729           __ bind(ok);
 730         }
 731 #endif // ASSERT
 732 
 733         __ enter();
 734         OopMap* map = save_live_registers(sasm);
 735         int call_offset;
 736         if (id == new_type_array_id) {
 737           call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
 738         } else {
 739           call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);



 740         }
 741 
 742         oop_maps = new OopMapSet();
 743         oop_maps->add_gc_map(call_offset, map);
 744         restore_live_registers_except_r0(sasm);
 745 
 746         __ verify_oop(obj);
 747         __ leave();
 748         __ ret(lr);
 749 
 750         // r0: new array
 751       }
 752       break;
 753 
 754     case new_multi_array_id:
 755       { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
 756         // r0,: klass
 757         // r19,: rank
 758         // r2: address of 1st dimension
 759         OopMap* map = save_live_registers(sasm);
 760         __ mov(c_rarg1, r0);
 761         __ mov(c_rarg3, r2);
 762         __ mov(c_rarg2, r19);
 763         int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);
 764 
 765         oop_maps = new OopMapSet();
 766         oop_maps->add_gc_map(call_offset, map);
 767         restore_live_registers_except_r0(sasm);
 768 
 769         // r0,: new multi array
 770         __ verify_oop(r0);
 771       }
 772       break;
 773 






















































































 774     case register_finalizer_id:
 775       {
 776         __ set_info("register_finalizer", dont_gc_arguments);
 777 
 778         // This is called via call_runtime so the arguments
 779         // will be placed in C ABI locations
 780 
 781         __ verify_oop(c_rarg0);
 782 
 783         // load the klass and check the has finalizer flag
 784         Label register_finalizer;
 785         Register t = r5;
 786         __ load_klass(t, r0);
 787         __ ldrw(t, Address(t, Klass::access_flags_offset()));
 788         __ tbnz(t, exact_log2(JVM_ACC_HAS_FINALIZER), register_finalizer);
 789         __ ret(lr);
 790 
 791         __ bind(register_finalizer);
 792         __ enter();
 793         OopMap* oop_map = save_live_registers(sasm);
 794         int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
 795         oop_maps = new OopMapSet();
 796         oop_maps->add_gc_map(call_offset, oop_map);
 797 
 798         // Now restore all the live registers
 799         restore_live_registers(sasm);
 800 
 801         __ leave();
 802         __ ret(lr);
 803       }
 804       break;
 805 
 806     case throw_class_cast_exception_id:
 807       { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
 808         oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
 809       }
 810       break;
 811 
 812     case throw_incompatible_class_change_error_id:
 813       { StubFrame f(sasm, "throw_incompatible_class_cast_exception", dont_gc_arguments, does_not_return);
 814         oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
 815       }
 816       break;
 817 






 818     case slow_subtype_check_id:
 819       {
 820         // Typical calling sequence:
 821         // __ push(klass_RInfo);  // object klass or other subclass
 822         // __ push(sup_k_RInfo);  // array element klass or other superclass
 823         // __ bl(slow_subtype_check);
 824         // Note that the subclass is pushed first, and is therefore deepest.
 825         enum layout {
 826           r0_off, r0_off_hi,
 827           r2_off, r2_off_hi,
 828           r4_off, r4_off_hi,
 829           r5_off, r5_off_hi,
 830           sup_k_off, sup_k_off_hi,
 831           klass_off, klass_off_hi,
 832           framesize,
 833           result_off = sup_k_off
 834         };
 835 
 836         __ set_info("slow_subtype_check", dont_gc_arguments);
 837         __ push(RegSet::of(r0, r2, r4, r5), sp);

1001         __ leave();
1002         DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1003         assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1004 
1005         __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1006       }
1007       break;
1008 
1009     case dtrace_object_alloc_id:
1010       { // c_rarg0: object
1011         StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
1012         save_live_registers(sasm);
1013 
1014         __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);
1015 
1016         restore_live_registers(sasm);
1017       }
1018       break;
1019 
1020     default:


1021       { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
1022         __ mov(r0, (int)id);
1023         __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
1024       }
1025       break;
1026     }
1027   }


1028   return oop_maps;
1029 }
1030 
1031 #undef __
1032 
1033 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); return 0; }

 633       {
 634         oop_maps = generate_handle_exception(id, sasm);
 635         __ leave();
 636         __ ret(lr);
 637       }
 638       break;
 639 
 640     case throw_div0_exception_id:
 641       { StubFrame f(sasm, "throw_div0_exception", dont_gc_arguments, does_not_return);
 642         oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_div0_exception), false);
 643       }
 644       break;
 645 
 646     case throw_null_pointer_exception_id:
 647       { StubFrame f(sasm, "throw_null_pointer_exception", dont_gc_arguments, does_not_return);
 648         oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_null_pointer_exception), false);
 649       }
 650       break;
 651 
 652     case new_instance_id:
 653     case new_instance_no_inline_id:
 654     case fast_new_instance_id:
 655     case fast_new_instance_init_check_id:
 656       {
 657         Register klass = r3; // Incoming
 658         Register obj   = r0; // Result
 659 
 660         if (id == new_instance_id) {
 661           __ set_info("new_instance", dont_gc_arguments);
 662         } else if (id == new_instance_no_inline_id) {
 663           __ set_info("new_instance_no_inline", dont_gc_arguments);
 664         } else if (id == fast_new_instance_id) {
 665           __ set_info("fast new_instance", dont_gc_arguments);
 666         } else {
 667           assert(id == fast_new_instance_init_check_id, "bad StubID");
 668           __ set_info("fast new_instance init check", dont_gc_arguments);
 669         }
 670 
 671         __ enter();
 672         OopMap* map = save_live_registers(sasm);
 673         int call_offset;
 674         if (id == new_instance_no_inline_id) {
 675           call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance_no_inline), klass);
 676         } else {
 677           call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
 678         }
 679         oop_maps = new OopMapSet();
 680         oop_maps->add_gc_map(call_offset, map);
 681         restore_live_registers_except_r0(sasm);
 682         __ verify_oop(obj);
 683         __ leave();
 684         __ ret(lr);
 685 
 686         // r0,: new instance
 687       }
 688 
 689       break;
 690 
 691     case counter_overflow_id:
 692       {
 693         Register bci = r0, method = r1;
 694         __ enter();
 695         OopMap* map = save_live_registers(sasm);
 696         // Retrieve bci
 697         __ ldrw(bci, Address(rfp, 2*BytesPerWord));
 698         // And a pointer to the Method*
 699         __ ldr(method, Address(rfp, 3*BytesPerWord));
 700         int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
 701         oop_maps = new OopMapSet();
 702         oop_maps->add_gc_map(call_offset, map);
 703         restore_live_registers(sasm);
 704         __ leave();
 705         __ ret(lr);
 706       }
 707       break;
 708 
 709     case new_type_array_id:
 710     case new_object_array_id:
 711     case new_flat_array_id:
 712       {
 713         Register length   = r19; // Incoming
 714         Register klass    = r3; // Incoming
 715         Register obj      = r0; // Result
 716 
 717         if (id == new_type_array_id) {
 718           __ set_info("new_type_array", dont_gc_arguments);
 719         } else if (id == new_object_array_id) {
 720           __ set_info("new_object_array", dont_gc_arguments);
 721         } else {
 722           __ set_info("new_flat_array", dont_gc_arguments);
 723         }
 724 
 725 #ifdef ASSERT
 726         // assert object type is really an array of the proper kind
 727         {
 728           Label ok;
 729           Register t0 = obj;
 730           __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
 731           __ asrw(t0, t0, Klass::_lh_array_tag_shift);
 732           switch (id) {
 733           case new_type_array_id:
 734             __ cmpw(t0, Klass::_lh_array_tag_type_value);
 735             __ br(Assembler::EQ, ok);
 736             __ stop("assert(is a type array klass)");
 737             break;
 738           case new_object_array_id:
 739             __ cmpw(t0, Klass::_lh_array_tag_obj_value); // new "[Ljava/lang/Object;"
 740             __ br(Assembler::EQ, ok);
 741             __ cmpw(t0, Klass::_lh_array_tag_vt_value);  // new "[LVT;"
 742             __ br(Assembler::EQ, ok);
 743             __ stop("assert(is an object or inline type array klass)");
 744             break;
 745           case new_flat_array_id:
 746             // new "[QVT;"
 747             __ cmpw(t0, Klass::_lh_array_tag_vt_value);  // the array can be a flat array.
 748             __ br(Assembler::EQ, ok);
 749             __ cmpw(t0, Klass::_lh_array_tag_obj_value); // the array cannot be a flat array (due to InlineArrayElementMaxFlatSize, etc)
 750             __ br(Assembler::EQ, ok);
 751             __ stop("assert(is an object or inline type array klass)");
 752             break;
 753           default:  ShouldNotReachHere();
 754           }
 755           __ should_not_reach_here();
 756           __ bind(ok);
 757         }
 758 #endif // ASSERT
 759 
 760         __ enter();
 761         OopMap* map = save_live_registers(sasm);
 762         int call_offset;
 763         if (id == new_type_array_id) {
 764           call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
 765         } else if (id == new_object_array_id) {
 766           call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
 767         } else {
 768           assert(id == new_flat_array_id, "must be");
 769           call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_flat_array), klass, length);
 770         }
 771 
 772         oop_maps = new OopMapSet();
 773         oop_maps->add_gc_map(call_offset, map);
 774         restore_live_registers_except_r0(sasm);
 775 
 776         __ verify_oop(obj);
 777         __ leave();
 778         __ ret(lr);
 779 
 780         // r0: new array
 781       }
 782       break;
 783 
 784     case new_multi_array_id:
 785       { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
 786         // r0,: klass
 787         // r19,: rank
 788         // r2: address of 1st dimension
 789         OopMap* map = save_live_registers(sasm);
 790         __ mov(c_rarg1, r0);
 791         __ mov(c_rarg3, r2);
 792         __ mov(c_rarg2, r19);
 793         int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);
 794 
 795         oop_maps = new OopMapSet();
 796         oop_maps->add_gc_map(call_offset, map);
 797         restore_live_registers_except_r0(sasm);
 798 
 799         // r0,: new multi array
 800         __ verify_oop(r0);
 801       }
 802       break;
 803 
 804     case buffer_inline_args_id:
 805     case buffer_inline_args_no_receiver_id:
 806       {
 807         const char* name = (id == buffer_inline_args_id) ?
 808           "buffer_inline_args" : "buffer_inline_args_no_receiver";
 809         StubFrame f(sasm, name, dont_gc_arguments);
 810         OopMap* map = save_live_registers(sasm);
 811         Register method = r19;   // Incoming
 812         address entry = (id == buffer_inline_args_id) ?
 813           CAST_FROM_FN_PTR(address, buffer_inline_args) :
 814           CAST_FROM_FN_PTR(address, buffer_inline_args_no_receiver);
 815         // This is called from a C1 method's scalarized entry point
 816         // where r0-r7 may be holding live argument values so we can't
 817         // return the result in r0 as the other stubs do. LR is used as
 818         // a temporary below to avoid the result being clobbered by
 819         // restore_live_registers.
 820         int call_offset = __ call_RT(lr, noreg, entry, method);
 821         oop_maps = new OopMapSet();
 822         oop_maps->add_gc_map(call_offset, map);
 823         restore_live_registers(sasm);
 824         __ mov(r20, lr);
 825         __ verify_oop(r20);  // r20: an array of buffered value objects
 826      }
 827      break;
 828 
 829     case load_flat_array_id:
 830       {
 831         StubFrame f(sasm, "load_flat_array", dont_gc_arguments);
 832         OopMap* map = save_live_registers(sasm);
 833 
 834         // Called with store_parameter and not C abi
 835 
 836         f.load_argument(1, r0); // r0,: array
 837         f.load_argument(0, r1); // r1,: index
 838         int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, load_flat_array), r0, r1);
 839 
 840         // Ensure the stores that initialize the buffer are visible
 841         // before any subsequent store that publishes this reference.
 842         __ membar(Assembler::StoreStore);
 843 
 844         oop_maps = new OopMapSet();
 845         oop_maps->add_gc_map(call_offset, map);
 846         restore_live_registers_except_r0(sasm);
 847 
 848         // r0: loaded element at array[index]
 849         __ verify_oop(r0);
 850       }
 851       break;
 852 
 853     case store_flat_array_id:
 854       {
 855         StubFrame f(sasm, "store_flat_array", dont_gc_arguments);
 856         OopMap* map = save_live_registers(sasm, 4);
 857 
 858         // Called with store_parameter and not C abi
 859 
 860         f.load_argument(2, r0); // r0: array
 861         f.load_argument(1, r1); // r1: index
 862         f.load_argument(0, r2); // r2: value
 863         int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, store_flat_array), r0, r1, r2);
 864 
 865         oop_maps = new OopMapSet();
 866         oop_maps->add_gc_map(call_offset, map);
 867         restore_live_registers_except_r0(sasm);
 868       }
 869       break;
 870 
 871     case substitutability_check_id:
 872       {
 873         StubFrame f(sasm, "substitutability_check", dont_gc_arguments);
 874         OopMap* map = save_live_registers(sasm);
 875 
 876         // Called with store_parameter and not C abi
 877 
 878         f.load_argument(1, r1); // r1,: left
 879         f.load_argument(0, r2); // r2,: right
 880         int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, substitutability_check), r1, r2);
 881 
 882         oop_maps = new OopMapSet();
 883         oop_maps->add_gc_map(call_offset, map);
 884         restore_live_registers_except_r0(sasm);
 885 
 886         // r0,: are the two operands substitutable
 887       }
 888       break;
 889 
 890     case register_finalizer_id:
 891       {
 892         __ set_info("register_finalizer", dont_gc_arguments);
 893 
 894         // This is called via call_runtime so the arguments
 895         // will be placed in C ABI locations
 896 
 897         __ verify_oop(c_rarg0);
 898 
 899         // load the klass and check the has finalizer flag
 900         Label register_finalizer;
 901         Register t = r5;
 902         __ load_klass(t, r0);
 903         __ ldrw(t, Address(t, Klass::access_flags_offset()));
 904         __ tbnz(t, exact_log2(JVM_ACC_HAS_FINALIZER), register_finalizer);
 905         __ ret(lr);
 906 
 907         __ bind(register_finalizer);
 908         __ enter();
 909         OopMap* oop_map = save_live_registers(sasm);
 910         int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
 911         oop_maps = new OopMapSet();
 912         oop_maps->add_gc_map(call_offset, oop_map);
 913 
 914         // Now restore all the live registers
 915         restore_live_registers(sasm);
 916 
 917         __ leave();
 918         __ ret(lr);
 919       }
 920       break;
 921 
 922     case throw_class_cast_exception_id:
 923       { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
 924         oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
 925       }
 926       break;
 927 
 928     case throw_incompatible_class_change_error_id:
 929       { StubFrame f(sasm, "throw_incompatible_class_change_error", dont_gc_arguments, does_not_return);
 930         oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
 931       }
 932       break;
 933 
 934     case throw_illegal_monitor_state_exception_id:
 935       { StubFrame f(sasm, "throw_illegal_monitor_state_exception", dont_gc_arguments);
 936         oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_illegal_monitor_state_exception), false);
 937       }
 938       break;
 939 
 940     case slow_subtype_check_id:
 941       {
 942         // Typical calling sequence:
 943         // __ push(klass_RInfo);  // object klass or other subclass
 944         // __ push(sup_k_RInfo);  // array element klass or other superclass
 945         // __ bl(slow_subtype_check);
 946         // Note that the subclass is pushed first, and is therefore deepest.
 947         enum layout {
 948           r0_off, r0_off_hi,
 949           r2_off, r2_off_hi,
 950           r4_off, r4_off_hi,
 951           r5_off, r5_off_hi,
 952           sup_k_off, sup_k_off_hi,
 953           klass_off, klass_off_hi,
 954           framesize,
 955           result_off = sup_k_off
 956         };
 957 
 958         __ set_info("slow_subtype_check", dont_gc_arguments);
 959         __ push(RegSet::of(r0, r2, r4, r5), sp);

1123         __ leave();
1124         DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1125         assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1126 
1127         __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1128       }
1129       break;
1130 
1131     case dtrace_object_alloc_id:
1132       { // c_rarg0: object
1133         StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
1134         save_live_registers(sasm);
1135 
1136         __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);
1137 
1138         restore_live_registers(sasm);
1139       }
1140       break;
1141 
1142     default:
1143       // FIXME: For unhandled trap_id this code fails with assert during VM initialization
1144       // rather than insert a call to unimplemented_entry
1145       { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
1146         __ mov(r0, (int)id);
1147         __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
1148       }
1149       break;
1150     }
1151   }
1152 
1153 
1154   return oop_maps;
1155 }
1156 
1157 #undef __
1158 
1159 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); return 0; }
< prev index next >