src/cpu/x86/vm/templateTable_x86_64.cpp

 123   }
 124   ShouldNotReachHere();
 125   return Assembler::zero;
 126 }
 127 
 128 
 129 // Miscellaneous helper routines
 130 // Store an oop (or NULL) at the address described by obj.
 131 // If val == noreg this means store a NULL
 132 
 133 static void do_oop_store(InterpreterMacroAssembler* _masm,
 134                          Address obj,
 135                          Register val,
 136                          BarrierSet::Name barrier,
 137                          bool precise) {
 138   assert(val == noreg || val == rax, "parameter is just for looks");
 139   switch (barrier) {
 140 #if INCLUDE_ALL_GCS
 141     case BarrierSet::G1SATBCT:
 142     case BarrierSet::G1SATBCTLogging:

 143       {
 144         // flatten object address if needed
 145         if (obj.index() == noreg && obj.disp() == 0) {
 146           if (obj.base() != rdx) {
 147             __ movq(rdx, obj.base());
 148           }
 149         } else {
 150           __ leaq(rdx, obj);
 151         }
 152         __ g1_write_barrier_pre(rdx /* obj */,
 153                                 rbx /* pre_val */,
 154                                 r15_thread /* thread */,
 155                                 r8  /* tmp */,
 156                                 val != noreg /* tosca_live */,
 157                                 false /* expand_call */);
 158         if (val == noreg) {
 159           __ store_heap_oop_null(Address(rdx, 0));
 160         } else {
 161           // G1 barrier needs uncompressed oop for region cross check.
 162           Register new_val = val;
 163           if (UseCompressedOops) {
 164             new_val = rbx;
 165             __ movptr(new_val, val);
 166           }



 167           __ store_heap_oop(Address(rdx, 0), val);
 168           __ g1_write_barrier_post(rdx /* store_adr */,
 169                                    new_val /* new_val */,
 170                                    r15_thread /* thread */,
 171                                    r8 /* tmp */,
 172                                    rbx /* tmp2 */);
 173         }
 174       }
 175       break;
 176 #endif // INCLUDE_ALL_GCS
 177     case BarrierSet::CardTableModRef:
 178     case BarrierSet::CardTableExtension:
 179       {
 180         if (val == noreg) {
 181           __ store_heap_oop_null(obj);
 182         } else {
 183           __ store_heap_oop(obj, val);
 184           // flatten object address if needed
 185           if (!precise || (obj.index() == noreg && obj.disp() == 0)) {
 186             __ store_check(obj.base());


 591   transition(vtos, ftos);
 592   locals_index_wide(rbx);
 593   __ movflt(xmm0, faddress(rbx));
 594 }
 595 
 596 void TemplateTable::wide_dload() {
 597   transition(vtos, dtos);
 598   locals_index_wide(rbx);
 599   __ movdbl(xmm0, daddress(rbx));
 600 }
 601 
 602 void TemplateTable::wide_aload() {
 603   transition(vtos, atos);
 604   locals_index_wide(rbx);
 605   __ movptr(rax, aaddress(rbx));
 606 }
 607 
 608 void TemplateTable::index_check(Register array, Register index) {
 609   // destroys rbx
 610   // check array

 611   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 612   // sign extend index for use by indexed load
 613   __ movl2ptr(index, index);
 614   // check index
 615   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 616   if (index != rbx) {
 617     // ??? convention: move aberrant index into ebx for exception message
 618     assert(rbx != array, "different registers");
 619     __ movl(rbx, index);
 620   }
 621   __ jump_cc(Assembler::aboveEqual,
 622              ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
 623 }
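// The single unsigned compare above implements both bounds checks: a negative
// index wraps to a large unsigned value, so one aboveEqual branch covers
// 0 <= index < length. Roughly:
//
//   if ((juint) index >= (juint) array->length()) goto throw_AIOOBE;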
 624 
 625 void TemplateTable::iaload() {
 626   transition(itos, itos);
 627   __ pop_ptr(rdx);
 628   // eax: index
 629   // rdx: array
 630   index_check(rdx, rax); // kills rbx

 631   __ movl(rax, Address(rdx, rax,
 632                        Address::times_4,
 633                        arrayOopDesc::base_offset_in_bytes(T_INT)));
 634 }
 635 
 636 void TemplateTable::laload() {
 637   transition(itos, ltos);
 638   __ pop_ptr(rdx);
 639   // eax: index
 640   // rdx: array

 641   index_check(rdx, rax); // kills rbx
 642   __ movq(rax, Address(rdx, rbx,
 643                        Address::times_8,
 644                        arrayOopDesc::base_offset_in_bytes(T_LONG)));
 645 }
 646 
 647 void TemplateTable::faload() {
 648   transition(itos, ftos);
 649   __ pop_ptr(rdx);
 650   // eax: index
 651   // rdx: array
 652   index_check(rdx, rax); // kills rbx

 653   __ movflt(xmm0, Address(rdx, rax,
 654                          Address::times_4,
 655                          arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
 656 }
 657 
 658 void TemplateTable::daload() {
 659   transition(itos, dtos);
 660   __ pop_ptr(rdx);
 661   // eax: index
 662   // rdx: array
 663   index_check(rdx, rax); // kills rbx

 664   __ movdbl(xmm0, Address(rdx, rax,
 665                           Address::times_8,
 666                           arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
 667 }
 668 
 669 void TemplateTable::aaload() {
 670   transition(itos, atos);
 671   __ pop_ptr(rdx);
 672   // eax: index
 673   // rdx: array
 674   index_check(rdx, rax); // kills rbx

 675   __ load_heap_oop(rax, Address(rdx, rax,
 676                                 UseCompressedOops ? Address::times_4 : Address::times_8,
 677                                 arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
 678 }
 679 
 680 void TemplateTable::baload() {
 681   transition(itos, itos);
 682   __ pop_ptr(rdx);
 683   // eax: index
 684   // rdx: array
 685   index_check(rdx, rax); // kills rbx

 686   __ load_signed_byte(rax,
 687                       Address(rdx, rax,
 688                               Address::times_1,
 689                               arrayOopDesc::base_offset_in_bytes(T_BYTE)));
 690 }
 691 
 692 void TemplateTable::caload() {
 693   transition(itos, itos);
 694   __ pop_ptr(rdx);
 695   // eax: index
 696   // rdx: array
 697   index_check(rdx, rax); // kills rbx

 698   __ load_unsigned_short(rax,
 699                          Address(rdx, rax,
 700                                  Address::times_2,
 701                                  arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 702 }
 703 
 704 // iload followed by caload frequent pair
 705 void TemplateTable::fast_icaload() {
 706   transition(vtos, itos);
 707   // load index out of locals
 708   locals_index(rbx);
 709   __ movl(rax, iaddress(rbx));
 710 
 711   // eax: index
 712   // rdx: array
 713   __ pop_ptr(rdx);
 714   index_check(rdx, rax); // kills rbx

 715   __ load_unsigned_short(rax,
 716                          Address(rdx, rax,
 717                                  Address::times_2,
 718                                  arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 719 }
 720 
 721 void TemplateTable::saload() {
 722   transition(itos, itos);
 723   __ pop_ptr(rdx);
 724   // eax: index
 725   // rdx: array
 726   index_check(rdx, rax); // kills rbx

 727   __ load_signed_short(rax,
 728                        Address(rdx, rax,
 729                                Address::times_2,
 730                                arrayOopDesc::base_offset_in_bytes(T_SHORT)));
 731 }
 732 
 733 void TemplateTable::iload(int n) {
 734   transition(vtos, itos);
 735   __ movl(rax, iaddress(n));
 736 }
 737 
 738 void TemplateTable::lload(int n) {
 739   transition(vtos, ltos);
 740   __ movq(rax, laddress(n));
 741 }
 742 
 743 void TemplateTable::fload(int n) {
 744   transition(vtos, ftos);
 745   __ movflt(xmm0, faddress(n));
 746 }


 891   __ pop_d();
 892   locals_index_wide(rbx);
 893   __ movdbl(daddress(rbx), xmm0);
 894 }
 895 
 896 void TemplateTable::wide_astore() {
 897   transition(vtos, vtos);
 898   __ pop_ptr(rax);
 899   locals_index_wide(rbx);
 900   __ movptr(aaddress(rbx), rax);
 901 }
 902 
 903 void TemplateTable::iastore() {
 904   transition(itos, vtos);
 905   __ pop_i(rbx);
 906   __ pop_ptr(rdx);
 907   // eax: value
 908   // ebx: index
 909   // rdx: array
 910   index_check(rdx, rbx); // prefer index in ebx

 911   __ movl(Address(rdx, rbx,
 912                   Address::times_4,
 913                   arrayOopDesc::base_offset_in_bytes(T_INT)),
 914           rax);
 915 }
 916 
 917 void TemplateTable::lastore() {
 918   transition(ltos, vtos);
 919   __ pop_i(rbx);
 920   __ pop_ptr(rdx);
 921   // rax: value
 922   // ebx: index
 923   // rdx: array
 924   index_check(rdx, rbx); // prefer index in ebx

 925   __ movq(Address(rdx, rbx,
 926                   Address::times_8,
 927                   arrayOopDesc::base_offset_in_bytes(T_LONG)),
 928           rax);
 929 }
 930 
 931 void TemplateTable::fastore() {
 932   transition(ftos, vtos);
 933   __ pop_i(rbx);
 934   __ pop_ptr(rdx);
 935   // xmm0: value
 936   // ebx:  index
 937   // rdx:  array
 938   index_check(rdx, rbx); // prefer index in ebx

 939   __ movflt(Address(rdx, rbx,
 940                    Address::times_4,
 941                    arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 942            xmm0);
 943 }
 944 
 945 void TemplateTable::dastore() {
 946   transition(dtos, vtos);
 947   __ pop_i(rbx);
 948   __ pop_ptr(rdx);
 949   // xmm0: value
 950   // ebx:  index
 951   // rdx:  array
 952   index_check(rdx, rbx); // prefer index in ebx

 953   __ movdbl(Address(rdx, rbx,
 954                    Address::times_8,
 955                    arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 956            xmm0);
 957 }
 958 
 959 void TemplateTable::aastore() {
 960   Label is_null, ok_is_subtype, done;
 961   transition(vtos, vtos);
 962   // stack: ..., array, index, value
 963   __ movptr(rax, at_tos());    // value
 964   __ movl(rcx, at_tos_p1()); // index
 965   __ movptr(rdx, at_tos_p2()); // array
 966 
 967   Address element_address(rdx, rcx,
 968                           UseCompressedOops? Address::times_4 : Address::times_8,
 969                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
 970 
 971   index_check(rdx, rcx);     // kills rbx

 972   // do array store check - check for NULL value first
 973   __ testptr(rax, rax);
 974   __ jcc(Assembler::zero, is_null);
 975 
 976   // Move subklass into rbx
 977   __ load_klass(rbx, rax);
 978   // Move superklass into rax
 979   __ load_klass(rax, rdx);
 980   __ movptr(rax, Address(rax,
 981                          ObjArrayKlass::element_klass_offset()));
 982   // Compress array + index*oopSize + 12 into a single register.  Frees rcx.
 983   __ lea(rdx, element_address);
 984 
 985   // Generate subtype check.  Blows rcx, rdi
 986   // Superklass in rax.  Subklass in rbx.
 987   __ gen_subtype_check(rbx, ok_is_subtype);
 988 
 989   // Come here on failure
 990   // object is at TOS
 991   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));


1002   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
1003   __ bind(is_null);
1004   __ profile_null_seen(rbx);
1005 
1006   // Store a NULL
1007   do_oop_store(_masm, element_address, noreg, _bs->kind(), true);
1008 
1009   // Pop stack arguments
1010   __ bind(done);
1011   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1012 }
1013 
1014 void TemplateTable::bastore() {
1015   transition(itos, vtos);
1016   __ pop_i(rbx);
1017   __ pop_ptr(rdx);
1018   // eax: value
1019   // ebx: index
1020   // rdx: array
1021   index_check(rdx, rbx); // prefer index in ebx

1022   // Need to check whether array is boolean or byte
1023   // since both types share the bastore bytecode.
1024   __ load_klass(rcx, rdx);
1025   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1026   int diffbit = Klass::layout_helper_boolean_diffbit();
1027   __ testl(rcx, diffbit);
1028   Label L_skip;
1029   __ jccb(Assembler::zero, L_skip);
1030   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
1031   __ bind(L_skip);
1032   __ movb(Address(rdx, rbx,
1033                   Address::times_1,
1034                   arrayOopDesc::base_offset_in_bytes(T_BYTE)),
1035           rax);
1036 }
1037 
1038 void TemplateTable::castore() {
1039   transition(itos, vtos);
1040   __ pop_i(rbx);
1041   __ pop_ptr(rdx);
1042   // eax: value
1043   // ebx: index
1044   // rdx: array
1045   index_check(rdx, rbx);  // prefer index in ebx

1046   __ movw(Address(rdx, rbx,
1047                   Address::times_2,
1048                   arrayOopDesc::base_offset_in_bytes(T_CHAR)),
1049           rax);
1050 }
1051 
1052 void TemplateTable::sastore() {
1053   castore();
1054 }
1055 
1056 void TemplateTable::istore(int n) {
1057   transition(itos, vtos);
1058   __ movl(iaddress(n), rax);
1059 }
1060 
1061 void TemplateTable::lstore(int n) {
1062   transition(ltos, vtos);
1063   __ movq(laddress(n), rax);
1064 }
1065 


1830   __ bind(not_taken);
1831   __ profile_not_taken_branch(rax);
1832 }
1833 
1834 void TemplateTable::if_nullcmp(Condition cc) {
1835   transition(atos, vtos);
1836   // assume branch is more often taken than not (loops use backward branches)
1837   Label not_taken;
1838   __ testptr(rax, rax);
1839   __ jcc(j_not(cc), not_taken);
1840   branch(false, false);
1841   __ bind(not_taken);
1842   __ profile_not_taken_branch(rax);
1843 }
1844 
1845 void TemplateTable::if_acmp(Condition cc) {
1846   transition(atos, vtos);
1847   // assume branch is more often taken than not (loops use backward branches)
1848   Label not_taken;
1849   __ pop_ptr(rdx);
1850   __ cmpptr(rdx, rax);
1851   __ jcc(j_not(cc), not_taken);
1852   branch(false, false);
1853   __ bind(not_taken);
1854   __ profile_not_taken_branch(rax);
1855 }
1856 
1857 void TemplateTable::ret() {
1858   transition(vtos, vtos);
1859   locals_index(rbx);
1860   __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
1861   __ profile_ret(rbx, rcx);
1862   __ get_method(rax);
1863   __ movptr(r13, Address(rax, Method::const_offset()));
1864   __ lea(r13, Address(r13, rbx, Address::times_1,
1865                       ConstMethod::codes_offset()));
1866   __ dispatch_next(vtos);
1867 }
1868 
1869 void TemplateTable::wide_ret() {
1870   transition(vtos, vtos);


2287 }
2288 
2289 void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
2290   transition(vtos, vtos);
2291 
2292   const Register cache = rcx;
2293   const Register index = rdx;
2294   const Register obj   = c_rarg3;
2295   const Register off   = rbx;
2296   const Register flags = rax;
2297   const Register bc = c_rarg3; // uses same reg as obj, so don't mix them
2298 
2299   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2300   jvmti_post_field_access(cache, index, is_static, false);
2301   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2302 
2303   if (!is_static) {
2304     // obj is on the stack
2305     pop_and_check_object(obj);
2306   }

2307 
2308   const Address field(obj, off, Address::times_1);
2309 
2310   Label Done, notByte, notBool, notInt, notShort, notChar,
2311               notLong, notFloat, notObj, notDouble;
2312 
2313   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2314   // Make sure we don't need to mask edx after the above shift
2315   assert(btos == 0, "change code, btos != 0");
2316 
2317   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2318   __ jcc(Assembler::notZero, notByte);
2319   // btos
2320   __ load_signed_byte(rax, field);
2321   __ push(btos);
2322   // Rewrite bytecode to be faster
2323   if (!is_static) {
2324     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2325   }
2326   __ jmp(Done);


2525   __ movl(rdx, flags);
2526   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2527   __ andl(rdx, 0x1);
2528 
2529   // field address
2530   const Address field(obj, off, Address::times_1);
2531 
2532   Label notByte, notBool, notInt, notShort, notChar,
2533         notLong, notFloat, notObj, notDouble;
2534 
2535   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2536 
2537   assert(btos == 0, "change code, btos != 0");
2538   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2539   __ jcc(Assembler::notZero, notByte);
2540 
2541   // btos
2542   {
2543     __ pop(btos);
2544     if (!is_static) pop_and_check_object(obj);

2545     __ movb(field, rax);
2546     if (!is_static) {
2547       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2548     }
2549     __ jmp(Done);
2550   }
2551 
2552   __ bind(notByte);
2553   __ cmpl(flags, ztos);
2554   __ jcc(Assembler::notEqual, notBool);
2555 
2556   // ztos
2557   {
2558     __ pop(ztos);
2559     if (!is_static) pop_and_check_object(obj);

2560     __ andl(rax, 0x1);
2561     __ movb(field, rax);
2562     if (!is_static) {
2563       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
2564     }
2565     __ jmp(Done);
2566   }
2567 
2568   __ bind(notBool);
2569   __ cmpl(flags, atos);
2570   __ jcc(Assembler::notEqual, notObj);
2571 
2572   // atos
2573   {
2574     __ pop(atos);
2575     if (!is_static) pop_and_check_object(obj);

2576     // Store into the field
2577     do_oop_store(_masm, field, rax, _bs->kind(), false);
2578     if (!is_static) {
2579       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2580     }
2581     __ jmp(Done);
2582   }
2583 
2584   __ bind(notObj);
2585   __ cmpl(flags, itos);
2586   __ jcc(Assembler::notEqual, notInt);
2587 
2588   // itos
2589   {
2590     __ pop(itos);
2591     if (!is_static) pop_and_check_object(obj);

2592     __ movl(field, rax);
2593     if (!is_static) {
2594       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2595     }
2596     __ jmp(Done);
2597   }
2598 
2599   __ bind(notInt);
2600   __ cmpl(flags, ctos);
2601   __ jcc(Assembler::notEqual, notChar);
2602 
2603   // ctos
2604   {
2605     __ pop(ctos);
2606     if (!is_static) pop_and_check_object(obj);

2607     __ movw(field, rax);
2608     if (!is_static) {
2609       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
2610     }
2611     __ jmp(Done);
2612   }
2613 
2614   __ bind(notChar);
2615   __ cmpl(flags, stos);
2616   __ jcc(Assembler::notEqual, notShort);
2617 
2618   // stos
2619   {
2620     __ pop(stos);
2621     if (!is_static) pop_and_check_object(obj);

2622     __ movw(field, rax);
2623     if (!is_static) {
2624       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
2625     }
2626     __ jmp(Done);
2627   }
2628 
2629   __ bind(notShort);
2630   __ cmpl(flags, ltos);
2631   __ jcc(Assembler::notEqual, notLong);
2632 
2633   // ltos
2634   {
2635     __ pop(ltos);
2636     if (!is_static) pop_and_check_object(obj);

2637     __ movq(field, rax);
2638     if (!is_static) {
2639       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
2640     }
2641     __ jmp(Done);
2642   }
2643 
2644   __ bind(notLong);
2645   __ cmpl(flags, ftos);
2646   __ jcc(Assembler::notEqual, notFloat);
2647 
2648   // ftos
2649   {
2650     __ pop(ftos);
2651     if (!is_static) pop_and_check_object(obj);

2652     __ movflt(field, xmm0);
2653     if (!is_static) {
2654       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
2655     }
2656     __ jmp(Done);
2657   }
2658 
2659   __ bind(notFloat);
2660 #ifdef ASSERT
2661   __ cmpl(flags, dtos);
2662   __ jcc(Assembler::notEqual, notDouble);
2663 #endif
2664 
2665   // dtos
2666   {
2667     __ pop(dtos);
2668     if (!is_static) pop_and_check_object(obj);

2669     __ movdbl(field, xmm0);
2670     if (!is_static) {
2671       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
2672     }
2673   }
2674 
2675 #ifdef ASSERT
2676   __ jmp(Done);
2677 
2678   __ bind(notDouble);
2679   __ stop("Bad state");
2680 #endif
2681 
2682   __ bind(Done);
2683 
2684   // Check for volatile store
2685   __ testl(rdx, rdx);
2686   __ jcc(Assembler::zero, notVolatile);
2687   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2688                                                Assembler::StoreStore));


2764 
2765   // test for volatile with rdx
2766   __ movl(rdx, Address(rcx, rbx, Address::times_8,
2767                        in_bytes(base +
2768                                 ConstantPoolCacheEntry::flags_offset())));
2769 
2770   // replace index with field offset from cache entry
2771   __ movptr(rbx, Address(rcx, rbx, Address::times_8,
2772                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
2773 
2774   // [jk] not needed currently
2775   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2776   //                                              Assembler::StoreStore));
2777 
2778   Label notVolatile;
2779   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2780   __ andl(rdx, 0x1);
2781 
2782   // Get object from stack
2783   pop_and_check_object(rcx);

2784 
2785   // field address
2786   const Address field(rcx, rbx, Address::times_1);
2787 
2788   // access field
2789   switch (bytecode()) {
2790   case Bytecodes::_fast_aputfield:
2791     do_oop_store(_masm, field, rax, _bs->kind(), false);
2792     break;
2793   case Bytecodes::_fast_lputfield:
2794     __ movq(field, rax);
2795     break;
2796   case Bytecodes::_fast_iputfield:
2797     __ movl(field, rax);
2798     break;
2799   case Bytecodes::_fast_zputfield:
2800     __ andl(rax, 0x1);  // boolean is true if LSB is 1
2801     // fall through to bputfield
2802   case Bytecodes::_fast_bputfield:
2803     __ movb(field, rax);


2853   }
2854 
2855   // access constant pool cache
2856   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
2857   // replace index with field offset from cache entry
2858   // [jk] not needed currently
2859   // if (os::is_MP()) {
2860   //   __ movl(rdx, Address(rcx, rbx, Address::times_8,
2861   //                        in_bytes(ConstantPoolCache::base_offset() +
2862   //                                 ConstantPoolCacheEntry::flags_offset())));
2863   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2864   //   __ andl(rdx, 0x1);
2865   // }
2866   __ movptr(rbx, Address(rcx, rbx, Address::times_8,
2867                          in_bytes(ConstantPoolCache::base_offset() +
2868                                   ConstantPoolCacheEntry::f2_offset())));
2869 
2870   // rax: object
2871   __ verify_oop(rax);
2872   __ null_check(rax);

2873   Address field(rax, rbx, Address::times_1);
2874 
2875   // access field
2876   switch (bytecode()) {
2877   case Bytecodes::_fast_agetfield:
2878     __ load_heap_oop(rax, field);
2879     __ verify_oop(rax);
2880     break;
2881   case Bytecodes::_fast_lgetfield:
2882     __ movq(rax, field);
2883     break;
2884   case Bytecodes::_fast_igetfield:
2885     __ movl(rax, field);
2886     break;
2887   case Bytecodes::_fast_bgetfield:
2888     __ movsbl(rax, field);
2889     break;
2890   case Bytecodes::_fast_sgetfield:
2891     __ load_signed_short(rax, field);
2892     break;


2910   //   __ membar(Assembler::LoadLoad);
2911   //   __ bind(notVolatile);
2912   //};
2913 }
2914 
2915 void TemplateTable::fast_xaccess(TosState state) {
2916   transition(vtos, state);
2917 
2918   // get receiver
2919   __ movptr(rax, aaddress(0));
2920   // access constant pool cache
2921   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
2922   __ movptr(rbx,
2923             Address(rcx, rdx, Address::times_8,
2924                     in_bytes(ConstantPoolCache::base_offset() +
2925                              ConstantPoolCacheEntry::f2_offset())));
2926   // make sure exception is reported in correct bcp range (getfield is
2927   // next instruction)
2928   __ increment(r13);
2929   __ null_check(rax);

2930   switch (state) {
2931   case itos:
2932     __ movl(rax, Address(rax, rbx, Address::times_1));
2933     break;
2934   case atos:
2935     __ load_heap_oop(rax, Address(rax, rbx, Address::times_1));
2936     __ verify_oop(rax);
2937     break;
2938   case ftos:
2939     __ movflt(xmm0, Address(rax, rbx, Address::times_1));
2940     break;
2941   default:
2942     ShouldNotReachHere();
2943   }
2944 
2945   // [jk] not needed currently
2946   // if (os::is_MP()) {
2947   //   Label notVolatile;
2948   //   __ movl(rdx, Address(rcx, rdx, Address::times_8,
2949   //                        in_bytes(ConstantPoolCache::base_offset() +


3347   __ jcc(Assembler::notEqual, slow_case);
3348 
3349   // get instance_size in InstanceKlass (scaled to a count of bytes)
3350   __ movl(rdx,
3351           Address(rsi,
3352                   Klass::layout_helper_offset()));
3353   // test to see if it has a finalizer or is malformed in some way
3354   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3355   __ jcc(Assembler::notZero, slow_case);
3356 
3357   // Allocate the instance
3358   // 1) Try to allocate in the TLAB
3359   // 2) if fail and the object is large allocate in the shared Eden
3360   // 3) if the above fails (or is not applicable), go to a slow case
3361   // (creates a new TLAB, etc.)
3362 
3363   const bool allow_shared_alloc =
3364     Universe::heap()->supports_inline_contig_alloc() && !CMSIncrementalMode;
3365 
3366   if (UseTLAB) {





3367     __ movptr(rax, Address(r15_thread, in_bytes(JavaThread::tlab_top_offset())));
3368     __ lea(rbx, Address(rax, rdx, Address::times_1));
3369     __ cmpptr(rbx, Address(r15_thread, in_bytes(JavaThread::tlab_end_offset())));
3370     __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case);
3371     __ movptr(Address(r15_thread, in_bytes(JavaThread::tlab_top_offset())), rbx);

3372     if (ZeroTLAB) {
3373       // the fields have been already cleared
3374       __ jmp(initialize_header);
3375     } else {
3376       // initialize both the header and fields
3377       __ jmp(initialize_object);
3378     }
3379   }
3380 
3381   // Allocation in the shared Eden, if allowed.
3382   //
3383   // rdx: instance size in bytes
3384   if (allow_shared_alloc) {
3385     __ bind(allocate_shared);
3386 
3387     ExternalAddress top((address)Universe::heap()->top_addr());
3388     ExternalAddress end((address)Universe::heap()->end_addr());
3389 
3390     const Register RtopAddr = rscratch1;
3391     const Register RendAddr = rscratch2;


 3653 // Note: monitorenter & exit are symmetric routines, which is reflected
3654 //       in the assembly code structure as well
3655 //
3656 // Stack layout:
3657 //
3658 // [expressions  ] <--- rsp               = expression stack top
3659 // ..
3660 // [expressions  ]
3661 // [monitor entry] <--- monitor block top = expression stack bot
3662 // ..
3663 // [monitor entry]
3664 // [frame data   ] <--- monitor block bot
3665 // ...
3666 // [saved rbp    ] <--- rbp
3667 void TemplateTable::monitorenter() {
3668   transition(atos, vtos);
3669 
3670   // check for NULL object
3671   __ null_check(rax);
3672 





3673   const Address monitor_block_top(
3674         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3675   const Address monitor_block_bot(
3676         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3677   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
3678 
3679   Label allocated;
3680 
3681   // initialize entry pointer
3682   __ xorl(c_rarg1, c_rarg1); // points to free slot or NULL
3683 
3684   // find a free slot in the monitor block (result in c_rarg1)
3685   {
3686     Label entry, loop, exit;
3687     __ movptr(c_rarg3, monitor_block_top); // points to current entry,
3688                                      // starting with top-most entry
3689     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3690                                      // of monitor block
3691     __ jmpb(entry);
3692 
3693     __ bind(loop);
3694     // check if current entry is used
3695     __ cmpptr(Address(c_rarg3, BasicObjectLock::obj_offset_in_bytes()), (int32_t) NULL_WORD);
3696     // if not used then remember entry in c_rarg1
3697     __ cmov(Assembler::equal, c_rarg1, c_rarg3);
3698     // check if current entry is for same object
3699     __ cmpptr(rax, Address(c_rarg3, BasicObjectLock::obj_offset_in_bytes()));
3700     // if same object then stop searching
3701     __ jccb(Assembler::equal, exit);
3702     // otherwise advance to next entry
3703     __ addptr(c_rarg3, entry_size);
3704     __ bind(entry);
3705     // check if bottom reached
3706     __ cmpptr(c_rarg3, c_rarg2);
3707     // if not at bottom then check this entry
3708     __ jcc(Assembler::notEqual, loop);
3709     __ bind(exit);
3710   }
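// A C-style sketch of the slot search above (a free slot is remembered via
// cmov while the scan continues, so a recursive enter on the same object
// stops at the existing entry):
//
//   BasicObjectLock* free_slot = NULL;
//   for (e = block_top; e != block_bot; e++) {
//     if (e->obj() == NULL) free_slot = e;   // remember a free slot
//     if (e->obj() == lock_obj) break;       // same object: stop searching
//   }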
3711 


3747   // store object
3748   __ movptr(Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()), rax);
3749   __ lock_object(c_rarg1);
3750 
3751   // check to make sure this monitor doesn't cause stack overflow after locking
3752   __ save_bcp();  // in case of exception
3753   __ generate_stack_overflow_check(0);
3754 
3755   // The bcp has already been incremented. Just need to dispatch to
3756   // next instruction.
3757   __ dispatch_next(vtos);
3758 }
3759 
3760 
3761 void TemplateTable::monitorexit() {
3762   transition(atos, vtos);
3763 
3764   // check for NULL object
3765   __ null_check(rax);
3766 





3767   const Address monitor_block_top(
3768         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3769   const Address monitor_block_bot(
3770         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3771   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
3772 
3773   Label found;
3774 
3775   // find matching slot
3776   {
3777     Label entry, loop;
3778     __ movptr(c_rarg1, monitor_block_top); // points to current entry,
3779                                      // starting with top-most entry
3780     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3781                                      // of monitor block
3782     __ jmpb(entry);
3783 
3784     __ bind(loop);
3785     // check if current entry is for same object
3786     __ cmpptr(rax, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()));
3787     // if same object then stop searching
3788     __ jcc(Assembler::equal, found);
3789     // otherwise advance to next entry
3790     __ addptr(c_rarg1, entry_size);
3791     __ bind(entry);
3792     // check if bottom reached
3793     __ cmpptr(c_rarg1, c_rarg2);
3794     // if not at bottom then check this entry
3795     __ jcc(Assembler::notEqual, loop);
3796   }
3797 
3798   // error handling. Unlocking was not block-structured
3799   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
3800                    InterpreterRuntime::throw_illegal_monitor_state_exception));
3801   __ should_not_reach_here();
3802 




 123   }
 124   ShouldNotReachHere();
 125   return Assembler::zero;
 126 }
 127 
 128 
 129 // Miscellaneous helper routines
 130 // Store an oop (or NULL) at the address described by obj.
 131 // If val == noreg this means store a NULL
 132 
 133 static void do_oop_store(InterpreterMacroAssembler* _masm,
 134                          Address obj,
 135                          Register val,
 136                          BarrierSet::Name barrier,
 137                          bool precise) {
 138   assert(val == noreg || val == rax, "parameter is just for looks");
 139   switch (barrier) {
 140 #if INCLUDE_ALL_GCS
 141     case BarrierSet::G1SATBCT:
 142     case BarrierSet::G1SATBCTLogging:
 143     case BarrierSet::ShenandoahBarrierSet:
 144       {
 145         // flatten object address if needed
 146         if (obj.index() == noreg && obj.disp() == 0) {
 147           if (obj.base() != rdx) {
 148             __ movq(rdx, obj.base());
 149           }
 150         } else {
 151           __ leaq(rdx, obj);
 152         }
 153         __ g1_write_barrier_pre(rdx /* obj */,
 154                                 rbx /* pre_val */,
 155                                 r15_thread /* thread */,
 156                                 r8  /* tmp */,
 157                                 val != noreg /* tosca_live */,
 158                                 false /* expand_call */);
 159         if (val == noreg) {
 160           __ store_heap_oop_null(Address(rdx, 0));
 161         } else {
 162           // G1 barrier needs uncompressed oop for region cross check.
 163           Register new_val = val;
 164           if (UseCompressedOops) {
 165             new_val = rbx;
 166             __ movptr(new_val, val);
 167           }
 168           // For Shenandoah, make sure we only store refs into to-space.
 169           oopDesc::bs()->interpreter_read_barrier(_masm, val);
 170 
 171           __ store_heap_oop(Address(rdx, 0), val);
 172           __ g1_write_barrier_post(rdx /* store_adr */,
 173                                    new_val /* new_val */,
 174                                    r15_thread /* thread */,
 175                                    r8 /* tmp */,
 176                                    rbx /* tmp2 */);
 177         }
 178       }
 179       break;
 180 #endif // INCLUDE_ALL_GCS
 181     case BarrierSet::CardTableModRef:
 182     case BarrierSet::CardTableExtension:
 183       {
 184         if (val == noreg) {
 185           __ store_heap_oop_null(obj);
 186         } else {
 187           __ store_heap_oop(obj, val);
 188           // flatten object address if needed
 189           if (!precise || (obj.index() == noreg && obj.disp() == 0)) {
 190             __ store_check(obj.base());
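// In the Shenandoah path of do_oop_store above, the read barrier applied to
// val before the store enforces the to-space invariant stated in the comment:
// the reference that ends up in the heap is first resolved through its
// forwarding pointer. Conceptually (a sketch, not the emitted code):
//
//   val = resolve_forwarding(val);  // no-op if val was never evacuated
//   *field = val;                   // heap never sees a stale from-space ref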


 595   transition(vtos, ftos);
 596   locals_index_wide(rbx);
 597   __ movflt(xmm0, faddress(rbx));
 598 }
 599 
 600 void TemplateTable::wide_dload() {
 601   transition(vtos, dtos);
 602   locals_index_wide(rbx);
 603   __ movdbl(xmm0, daddress(rbx));
 604 }
 605 
 606 void TemplateTable::wide_aload() {
 607   transition(vtos, atos);
 608   locals_index_wide(rbx);
 609   __ movptr(rax, aaddress(rbx));
 610 }
 611 
 612 void TemplateTable::index_check(Register array, Register index) {
 613   // destroys rbx
 614   // check array
 615 
 616   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 617   // sign extend index for use by indexed load
 618   __ movl2ptr(index, index);
 619   // check index
 620   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 621   if (index != rbx) {
 622     // ??? convention: move aberrant index into ebx for exception message
 623     assert(rbx != array, "different registers");
 624     __ movl(rbx, index);
 625   }
 626   __ jump_cc(Assembler::aboveEqual,
 627              ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
 628 }
 629 
 630 void TemplateTable::iaload() {
 631   transition(itos, itos);
 632   __ pop_ptr(rdx);
 633   // eax: index
 634   // rdx: array
 635   index_check(rdx, rax); // kills rbx
 636   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
 637   __ movl(rax, Address(rdx, rax,
 638                        Address::times_4,
 639                        arrayOopDesc::base_offset_in_bytes(T_INT)));
 640 }
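// From here on every array access resolves the array oop through the barrier
// set before touching its elements; the _not_null barrier variant is valid
// because index_check has already null-checked the array. The common shape:
//
//   index_check(array, index);                        // null + bounds check
//   array = read_barrier_not_null(array);             // resolve to-space copy
//   value = array[index];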
 641 
 642 void TemplateTable::laload() {
 643   transition(itos, ltos);
 644   __ pop_ptr(rdx);
 645   // eax: index
 646   // rdx: array
 647   index_check(rdx, rax); // kills rbx
 648   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
 649   __ movq(rax, Address(rdx, rbx,
 650                        Address::times_8,
 651                        arrayOopDesc::base_offset_in_bytes(T_LONG)));
 652 }
 653 
 654 void TemplateTable::faload() {
 655   transition(itos, ftos);
 656   __ pop_ptr(rdx);
 657   // eax: index
 658   // rdx: array
 659   index_check(rdx, rax); // kills rbx
 660   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
 661   __ movflt(xmm0, Address(rdx, rax,
 662                          Address::times_4,
 663                          arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
 664 }
 665 
 666 void TemplateTable::daload() {
 667   transition(itos, dtos);
 668   __ pop_ptr(rdx);
 669   // eax: index
 670   // rdx: array
 671   index_check(rdx, rax); // kills rbx
 672   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
 673   __ movdbl(xmm0, Address(rdx, rax,
 674                           Address::times_8,
 675                           arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
 676 }
 677 
 678 void TemplateTable::aaload() {
 679   transition(itos, atos);
 680   __ pop_ptr(rdx);
 681   // eax: index
 682   // rdx: array
 683   index_check(rdx, rax); // kills rbx
 684   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
 685   __ load_heap_oop(rax, Address(rdx, rax,
 686                                 UseCompressedOops ? Address::times_4 : Address::times_8,
 687                                 arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
 688 }
 689 
 690 void TemplateTable::baload() {
 691   transition(itos, itos);
 692   __ pop_ptr(rdx);
 693   // eax: index
 694   // rdx: array
 695   index_check(rdx, rax); // kills rbx
 696   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
 697   __ load_signed_byte(rax,
 698                       Address(rdx, rax,
 699                               Address::times_1,
 700                               arrayOopDesc::base_offset_in_bytes(T_BYTE)));
 701 }
 702 
 703 void TemplateTable::caload() {
 704   transition(itos, itos);
 705   __ pop_ptr(rdx);
 706   // eax: index
 707   // rdx: array
 708   index_check(rdx, rax); // kills rbx
 709   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
 710   __ load_unsigned_short(rax,
 711                          Address(rdx, rax,
 712                                  Address::times_2,
 713                                  arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 714 }
 715 
 716 // iload followed by caload frequent pair
 717 void TemplateTable::fast_icaload() {
 718   transition(vtos, itos);
 719   // load index out of locals
 720   locals_index(rbx);
 721   __ movl(rax, iaddress(rbx));
 722 
 723   // eax: index
 724   // rdx: array
 725   __ pop_ptr(rdx);
 726   index_check(rdx, rax); // kills rbx
 727   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
 728   __ load_unsigned_short(rax,
 729                          Address(rdx, rax,
 730                                  Address::times_2,
 731                                  arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 732 }
 733 
 734 void TemplateTable::saload() {
 735   transition(itos, itos);
 736   __ pop_ptr(rdx);
 737   // eax: index
 738   // rdx: array
 739   index_check(rdx, rax); // kills rbx
 740   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
 741   __ load_signed_short(rax,
 742                        Address(rdx, rax,
 743                                Address::times_2,
 744                                arrayOopDesc::base_offset_in_bytes(T_SHORT)));
 745 }
 746 
 747 void TemplateTable::iload(int n) {
 748   transition(vtos, itos);
 749   __ movl(rax, iaddress(n));
 750 }
 751 
 752 void TemplateTable::lload(int n) {
 753   transition(vtos, ltos);
 754   __ movq(rax, laddress(n));
 755 }
 756 
 757 void TemplateTable::fload(int n) {
 758   transition(vtos, ftos);
 759   __ movflt(xmm0, faddress(n));
 760 }


 905   __ pop_d();
 906   locals_index_wide(rbx);
 907   __ movdbl(daddress(rbx), xmm0);
 908 }
 909 
 910 void TemplateTable::wide_astore() {
 911   transition(vtos, vtos);
 912   __ pop_ptr(rax);
 913   locals_index_wide(rbx);
 914   __ movptr(aaddress(rbx), rax);
 915 }
 916 
 917 void TemplateTable::iastore() {
 918   transition(itos, vtos);
 919   __ pop_i(rbx);
 920   __ pop_ptr(rdx);
 921   // eax: value
 922   // ebx: index
 923   // rdx: array
 924   index_check(rdx, rbx); // prefer index in ebx
 925   oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
 926   __ movl(Address(rdx, rbx,
 927                   Address::times_4,
 928                   arrayOopDesc::base_offset_in_bytes(T_INT)),
 929           rax);
 930 }
 931 
 932 void TemplateTable::lastore() {
 933   transition(ltos, vtos);
 934   __ pop_i(rbx);
 935   __ pop_ptr(rdx);
 936   // rax: value
 937   // ebx: index
 938   // rdx: array
 939   index_check(rdx, rbx); // prefer index in ebx
 940   oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
 941   __ movq(Address(rdx, rbx,
 942                   Address::times_8,
 943                   arrayOopDesc::base_offset_in_bytes(T_LONG)),
 944           rax);
 945 }
 946 
 947 void TemplateTable::fastore() {
 948   transition(ftos, vtos);
 949   __ pop_i(rbx);
 950   __ pop_ptr(rdx);
 951   // xmm0: value
 952   // ebx:  index
 953   // rdx:  array
 954   index_check(rdx, rbx); // prefer index in ebx
 955   oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
 956   __ movflt(Address(rdx, rbx,
 957                    Address::times_4,
 958                    arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 959            xmm0);
 960 }
 961 
 962 void TemplateTable::dastore() {
 963   transition(dtos, vtos);
 964   __ pop_i(rbx);
 965   __ pop_ptr(rdx);
 966   // xmm0: value
 967   // ebx:  index
 968   // rdx:  array
 969   index_check(rdx, rbx); // prefer index in ebx
 970   oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
 971   __ movdbl(Address(rdx, rbx,
 972                    Address::times_8,
 973                    arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 974            xmm0);
 975 }
 976 
 977 void TemplateTable::aastore() {
 978   Label is_null, ok_is_subtype, done;
 979   transition(vtos, vtos);
 980   // stack: ..., array, index, value
 981   __ movptr(rax, at_tos());    // value
 982   __ movl(rcx, at_tos_p1()); // index
 983   __ movptr(rdx, at_tos_p2()); // array
 984 
 985   Address element_address(rdx, rcx,
 986                           UseCompressedOops? Address::times_4 : Address::times_8,
 987                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
 988 
 989   index_check(rdx, rcx);     // kills rbx
 990   oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
 991   // do array store check - check for NULL value first
 992   __ testptr(rax, rax);
 993   __ jcc(Assembler::zero, is_null);
 994 
 995   // Move subklass into rbx
 996   __ load_klass(rbx, rax);
 997   // Move superklass into rax
 998   __ load_klass(rax, rdx);
 999   __ movptr(rax, Address(rax,
1000                          ObjArrayKlass::element_klass_offset()));
1001   // Compress array + index*oopSize + 12 into a single register.  Frees rcx.
1002   __ lea(rdx, element_address);
1003 
1004   // Generate subtype check.  Blows rcx, rdi
1005   // Superklass in rax.  Subklass in rbx.
1006   __ gen_subtype_check(rbx, ok_is_subtype);
1007 
1008   // Come here on failure
1009   // object is at TOS
1010   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));


1021   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
1022   __ bind(is_null);
1023   __ profile_null_seen(rbx);
1024 
1025   // Store a NULL
1026   do_oop_store(_masm, element_address, noreg, _bs->kind(), true);
1027 
1028   // Pop stack arguments
1029   __ bind(done);
1030   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1031 }
1032 
1033 void TemplateTable::bastore() {
1034   transition(itos, vtos);
1035   __ pop_i(rbx);
1036   __ pop_ptr(rdx);
1037   // eax: value
1038   // ebx: index
1039   // rdx: array
1040   index_check(rdx, rbx); // prefer index in ebx
1041   oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
1042   // Need to check whether array is boolean or byte
1043   // since both types share the bastore bytecode.
1044   __ load_klass(rcx, rdx);
1045   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1046   int diffbit = Klass::layout_helper_boolean_diffbit();
1047   __ testl(rcx, diffbit);
1048   Label L_skip;
1049   __ jccb(Assembler::zero, L_skip);
1050   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
1051   __ bind(L_skip);
1052   __ movb(Address(rdx, rbx,
1053                   Address::times_1,
1054                   arrayOopDesc::base_offset_in_bytes(T_BYTE)),
1055           rax);
1056 }
1057 
1058 void TemplateTable::castore() {
1059   transition(itos, vtos);
1060   __ pop_i(rbx);
1061   __ pop_ptr(rdx);
1062   // eax: value
1063   // ebx: index
1064   // rdx: array
1065   index_check(rdx, rbx);  // prefer index in ebx
1066   oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
1067   __ movw(Address(rdx, rbx,
1068                   Address::times_2,
1069                   arrayOopDesc::base_offset_in_bytes(T_CHAR)),
1070           rax);
1071 }
1072 
1073 void TemplateTable::sastore() {
1074   castore();
1075 }
1076 
1077 void TemplateTable::istore(int n) {
1078   transition(itos, vtos);
1079   __ movl(iaddress(n), rax);
1080 }
1081 
1082 void TemplateTable::lstore(int n) {
1083   transition(ltos, vtos);
1084   __ movq(laddress(n), rax);
1085 }
1086 


1851   __ bind(not_taken);
1852   __ profile_not_taken_branch(rax);
1853 }
1854 
1855 void TemplateTable::if_nullcmp(Condition cc) {
1856   transition(atos, vtos);
1857   // assume branch is more often taken than not (loops use backward branches)
1858   Label not_taken;
1859   __ testptr(rax, rax);
1860   __ jcc(j_not(cc), not_taken);
1861   branch(false, false);
1862   __ bind(not_taken);
1863   __ profile_not_taken_branch(rax);
1864 }
1865 
1866 void TemplateTable::if_acmp(Condition cc) {
1867   transition(atos, vtos);
1868   // assume branch is more often taken than not (loops use backward branches)
1869   Label not_taken;
1870   __ pop_ptr(rdx);
1871   __ cmpoops(rdx, rax);
1872   __ jcc(j_not(cc), not_taken);
1873   branch(false, false);
1874   __ bind(not_taken);
1875   __ profile_not_taken_branch(rax);
1876 }
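// cmpoops replaces the plain cmpptr of the old code: under Shenandoah two
// physical copies of one object may be live at once. Assuming the usual
// barrier-set expansion, the comparison retries through read barriers when
// the raw pointers differ:
//
//   cmpptr(rdx, rax);
//   jcc(equal, done);          // equal raw pointers: same object for sure
//   read_barrier(rdx);         // otherwise resolve both operands
//   read_barrier(rax);
//   cmpptr(rdx, rax);          // and compare the canonical addresses
//   done: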
1877 
1878 void TemplateTable::ret() {
1879   transition(vtos, vtos);
1880   locals_index(rbx);
1881   __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
1882   __ profile_ret(rbx, rcx);
1883   __ get_method(rax);
1884   __ movptr(r13, Address(rax, Method::const_offset()));
1885   __ lea(r13, Address(r13, rbx, Address::times_1,
1886                       ConstMethod::codes_offset()));
1887   __ dispatch_next(vtos);
1888 }
1889 
1890 void TemplateTable::wide_ret() {
1891   transition(vtos, vtos);


2308 }
2309 
2310 void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
2311   transition(vtos, vtos);
2312 
2313   const Register cache = rcx;
2314   const Register index = rdx;
2315   const Register obj   = c_rarg3;
2316   const Register off   = rbx;
2317   const Register flags = rax;
2318   const Register bc = c_rarg3; // uses same reg as obj, so don't mix them
2319 
2320   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2321   jvmti_post_field_access(cache, index, is_static, false);
2322   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2323 
2324   if (!is_static) {
2325     // obj is on the stack
2326     pop_and_check_object(obj);
2327   }
2328   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, obj);
2329 
2330   const Address field(obj, off, Address::times_1);
2331 
2332   Label Done, notByte, notBool, notInt, notShort, notChar,
2333               notLong, notFloat, notObj, notDouble;
2334 
2335   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2336   // Make sure we don't need to mask edx after the above shift
2337   assert(btos == 0, "change code, btos != 0");
2338 
2339   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2340   __ jcc(Assembler::notZero, notByte);
2341   // btos
2342   __ load_signed_byte(rax, field);
2343   __ push(btos);
2344   // Rewrite bytecode to be faster
2345   if (!is_static) {
2346     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2347   }
2348   __ jmp(Done);
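// patch_bytecode rewrites the generic getfield in the bytecode stream to a
// type-specialized fast variant, so later executions of this instruction
// skip field resolution entirely; the remaining tos states are quickened
// the same way:
//
//   getfield #idx   -->   fast_bgetfield #idx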


2547   __ movl(rdx, flags);
2548   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2549   __ andl(rdx, 0x1);
2550 
2551   // field address
2552   const Address field(obj, off, Address::times_1);
2553 
2554   Label notByte, notBool, notInt, notShort, notChar,
2555         notLong, notFloat, notObj, notDouble;
2556 
2557   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2558 
2559   assert(btos == 0, "change code, btos != 0");
2560   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2561   __ jcc(Assembler::notZero, notByte);
2562 
2563   // btos
2564   {
2565     __ pop(btos);
2566     if (!is_static) pop_and_check_object(obj);
2567     oopDesc::bs()->interpreter_write_barrier(_masm, obj);
2568     __ movb(field, rax);
2569     if (!is_static) {
2570       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2571     }
2572     __ jmp(Done);
2573   }
2574 
2575   __ bind(notByte);
2576   __ cmpl(flags, ztos);
2577   __ jcc(Assembler::notEqual, notBool);
2578 
2579   // ztos
2580   {
2581     __ pop(ztos);
2582     if (!is_static) pop_and_check_object(obj);
2583     oopDesc::bs()->interpreter_write_barrier(_masm, obj);
2584     __ andl(rax, 0x1);
2585     __ movb(field, rax);
2586     if (!is_static) {
2587       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
2588     }
2589     __ jmp(Done);
2590   }
2591 
2592   __ bind(notBool);
2593   __ cmpl(flags, atos);
2594   __ jcc(Assembler::notEqual, notObj);
2595 
2596   // atos
2597   {
2598     __ pop(atos);
2599     if (!is_static) pop_and_check_object(obj);
2600     oopDesc::bs()->interpreter_write_barrier(_masm, obj);
2601     // Store into the field
2602     do_oop_store(_masm, field, rax, _bs->kind(), false);
2603     if (!is_static) {
2604       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2605     }
2606     __ jmp(Done);
2607   }
2608 
2609   __ bind(notObj);
2610   __ cmpl(flags, itos);
2611   __ jcc(Assembler::notEqual, notInt);
2612 
2613   // itos
2614   {
2615     __ pop(itos);
2616     if (!is_static) pop_and_check_object(obj);
2617     oopDesc::bs()->interpreter_write_barrier(_masm, obj);
2618     __ movl(field, rax);
2619     if (!is_static) {
2620       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2621     }
2622     __ jmp(Done);
2623   }
2624 
2625   __ bind(notInt);
2626   __ cmpl(flags, ctos);
2627   __ jcc(Assembler::notEqual, notChar);
2628 
2629   // ctos
2630   {
2631     __ pop(ctos);
2632     if (!is_static) pop_and_check_object(obj);
2633     oopDesc::bs()->interpreter_write_barrier(_masm, obj);
2634     __ movw(field, rax);
2635     if (!is_static) {
2636       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
2637     }
2638     __ jmp(Done);
2639   }
2640 
2641   __ bind(notChar);
2642   __ cmpl(flags, stos);
2643   __ jcc(Assembler::notEqual, notShort);
2644 
2645   // stos
2646   {
2647     __ pop(stos);
2648     if (!is_static) pop_and_check_object(obj);
2649     oopDesc::bs()->interpreter_write_barrier(_masm, obj);
2650     __ movw(field, rax);
2651     if (!is_static) {
2652       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
2653     }
2654     __ jmp(Done);
2655   }
2656 
2657   __ bind(notShort);
2658   __ cmpl(flags, ltos);
2659   __ jcc(Assembler::notEqual, notLong);
2660 
2661   // ltos
2662   {
2663     __ pop(ltos);
2664     if (!is_static) pop_and_check_object(obj);
2665     oopDesc::bs()->interpreter_write_barrier(_masm, obj);
2666     __ movq(field, rax);
2667     if (!is_static) {
2668       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
2669     }
2670     __ jmp(Done);
2671   }
2672 
2673   __ bind(notLong);
2674   __ cmpl(flags, ftos);
2675   __ jcc(Assembler::notEqual, notFloat);
2676 
2677   // ftos
2678   {
2679     __ pop(ftos);
2680     if (!is_static) pop_and_check_object(obj);
2681     oopDesc::bs()->interpreter_write_barrier(_masm, obj);
2682     __ movflt(field, xmm0);
2683     if (!is_static) {
2684       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
2685     }
2686     __ jmp(Done);
2687   }
2688 
2689   __ bind(notFloat);
2690 #ifdef ASSERT
2691   __ cmpl(flags, dtos);
2692   __ jcc(Assembler::notEqual, notDouble);
2693 #endif
2694 
2695   // dtos
2696   {
2697     __ pop(dtos);
2698     if (!is_static) pop_and_check_object(obj);
2699     oopDesc::bs()->interpreter_write_barrier(_masm, obj);
2700     __ movdbl(field, xmm0);
2701     if (!is_static) {
2702       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
2703     }
2704   }
2705 
2706 #ifdef ASSERT
2707   __ jmp(Done);
2708 
2709   __ bind(notDouble);
2710   __ stop("Bad state");
2711 #endif
2712 
2713   __ bind(Done);
2714 
2715   // Check for volatile store
2716   __ testl(rdx, rdx);
2717   __ jcc(Assembler::zero, notVolatile);
2718   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2719                                                Assembler::StoreStore));
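// A volatile store must be ordered against later volatile loads (StoreLoad)
// and later stores (StoreStore). On x86 only the StoreLoad half needs a real
// fence (e.g. a locked instruction); StoreStore is already guaranteed by TSO,
// so volatile_barrier is cheap here and is skipped entirely for non-volatile
// fields via the branch above.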


2795 
2796   // test for volatile with rdx
2797   __ movl(rdx, Address(rcx, rbx, Address::times_8,
2798                        in_bytes(base +
2799                                 ConstantPoolCacheEntry::flags_offset())));
2800 
2801   // replace index with field offset from cache entry
2802   __ movptr(rbx, Address(rcx, rbx, Address::times_8,
2803                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
2804 
2805   // [jk] not needed currently
2806   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2807   //                                              Assembler::StoreStore));
2808 
2809   Label notVolatile;
2810   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2811   __ andl(rdx, 0x1);
2812 
2813   // Get object from stack
2814   pop_and_check_object(rcx);
2815   oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
2816 
2817   // field address
2818   const Address field(rcx, rbx, Address::times_1);
2819 
2820   // access field
2821   switch (bytecode()) {
2822   case Bytecodes::_fast_aputfield:
2823     do_oop_store(_masm, field, rax, _bs->kind(), false);
2824     break;
2825   case Bytecodes::_fast_lputfield:
2826     __ movq(field, rax);
2827     break;
2828   case Bytecodes::_fast_iputfield:
2829     __ movl(field, rax);
2830     break;
2831   case Bytecodes::_fast_zputfield:
2832     __ andl(rax, 0x1);  // boolean is true if LSB is 1
2833     // fall through to bputfield
2834   case Bytecodes::_fast_bputfield:
2835     __ movb(field, rax);


2885   }
2886 
2887   // access constant pool cache
2888   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
2889   // replace index with field offset from cache entry
2890   // [jk] not needed currently
2891   // if (os::is_MP()) {
2892   //   __ movl(rdx, Address(rcx, rbx, Address::times_8,
2893   //                        in_bytes(ConstantPoolCache::base_offset() +
2894   //                                 ConstantPoolCacheEntry::flags_offset())));
2895   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2896   //   __ andl(rdx, 0x1);
2897   // }
2898   __ movptr(rbx, Address(rcx, rbx, Address::times_8,
2899                          in_bytes(ConstantPoolCache::base_offset() +
2900                                   ConstantPoolCacheEntry::f2_offset())));
2901 
2902   // rax: object
2903   __ verify_oop(rax);
2904   __ null_check(rax);
2905   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rax);
2906   Address field(rax, rbx, Address::times_1);
2907 
2908   // access field
2909   switch (bytecode()) {
2910   case Bytecodes::_fast_agetfield:
2911     __ load_heap_oop(rax, field);
2912     __ verify_oop(rax);
2913     break;
2914   case Bytecodes::_fast_lgetfield:
2915     __ movq(rax, field);
2916     break;
2917   case Bytecodes::_fast_igetfield:
2918     __ movl(rax, field);
2919     break;
2920   case Bytecodes::_fast_bgetfield:
2921     __ movsbl(rax, field);
2922     break;
2923   case Bytecodes::_fast_sgetfield:
2924     __ load_signed_short(rax, field);
2925     break;


2943   //   __ membar(Assembler::LoadLoad);
2944   //   __ bind(notVolatile);
2945   //};
2946 }
2947 
2948 void TemplateTable::fast_xaccess(TosState state) {
2949   transition(vtos, state);
2950 
2951   // get receiver
2952   __ movptr(rax, aaddress(0));
2953   // access constant pool cache
2954   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
2955   __ movptr(rbx,
2956             Address(rcx, rdx, Address::times_8,
2957                     in_bytes(ConstantPoolCache::base_offset() +
2958                              ConstantPoolCacheEntry::f2_offset())));
2959   // make sure exception is reported in correct bcp range (getfield is
2960   // next instruction)
2961   __ increment(r13);
2962   __ null_check(rax);
2963   oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rax);
2964   switch (state) {
2965   case itos:
2966     __ movl(rax, Address(rax, rbx, Address::times_1));
2967     break;
2968   case atos:
2969     __ load_heap_oop(rax, Address(rax, rbx, Address::times_1));
2970     __ verify_oop(rax);
2971     break;
2972   case ftos:
2973     __ movflt(xmm0, Address(rax, rbx, Address::times_1));
2974     break;
2975   default:
2976     ShouldNotReachHere();
2977   }
2978 
2979   // [jk] not needed currently
2980   // if (os::is_MP()) {
2981   //   Label notVolatile;
2982   //   __ movl(rdx, Address(rcx, rdx, Address::times_8,
2983   //                        in_bytes(ConstantPoolCache::base_offset() +


3381   __ jcc(Assembler::notEqual, slow_case);
3382 
3383   // get instance_size in InstanceKlass (scaled to a count of bytes)
3384   __ movl(rdx,
3385           Address(rsi,
3386                   Klass::layout_helper_offset()));
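       // For instance klasses the layout helper encodes the instance size
       // in bytes; its low bit (_lh_instance_slow_path_bit) is set when
       // allocation must take the slow path, e.g. for finalizable classes.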
3387   // test to see if it has a finalizer or is malformed in some way
3388   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3389   __ jcc(Assembler::notZero, slow_case);
3390 
3391   // Allocate the instance
3392   // 1) Try to allocate in the TLAB
3393   // 2) if that fails and the object is large, allocate in the shared Eden
3394   // 3) if the above fails (or is not applicable), go to a slow case
3395   // (creates a new TLAB, etc.)
3396 
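       // Inline allocation straight into Eden is only possible when the
       // heap exposes a contiguous top/end pair that can be bumped;
       // CMS incremental mode rules it out.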
3397   const bool allow_shared_alloc =
3398     Universe::heap()->supports_inline_contig_alloc() && !CMSIncrementalMode;
3399 
3400   if (UseTLAB) {
3401     uint oop_extra_words = Universe::heap()->oop_extra_words();
3402     if (oop_extra_words > 0) {
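           // Extra heap words requested by the GC for each object; for
           // Shenandoah this is presumably the Brooks forwarding-pointer word.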
3403       __ addq(rdx, oop_extra_words * HeapWordSize);
3404     }
3405 
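         // Bump-pointer TLAB allocation; as a C-like sketch (not the
         // generated code itself):
         //   HeapWord* top     = thread->tlab_top();        // rax
         //   HeapWord* new_top = top + size_in_bytes;       // rbx (size in rdx)
         //   if (new_top > thread->tlab_end()) goto allocate_shared_or_slow;
         //   thread->tlab_top() = new_top;   // rax points at the new object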
3406     __ movptr(rax, Address(r15_thread, in_bytes(JavaThread::tlab_top_offset())));
3407     __ lea(rbx, Address(rax, rdx, Address::times_1));
3408     __ cmpptr(rbx, Address(r15_thread, in_bytes(JavaThread::tlab_end_offset())));
3409     __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case);
3410     __ movptr(Address(r15_thread, in_bytes(JavaThread::tlab_top_offset())), rbx);
3411     Universe::heap()->compile_prepare_oop(_masm, rax);
3412     if (ZeroTLAB) {
3413       // the fields have already been cleared
3414       __ jmp(initialize_header);
3415     } else {
3416       // initialize both the header and fields
3417       __ jmp(initialize_object);
3418     }
3419   }
3420 
3421   // Allocation in the shared Eden, if allowed.
3422   //
3423   // rdx: instance size in bytes
3424   if (allow_shared_alloc) {
3425     __ bind(allocate_shared);
3426 
3427     ExternalAddress top((address)Universe::heap()->top_addr());
3428     ExternalAddress end((address)Universe::heap()->end_addr());
3429 
3430     const Register RtopAddr = rscratch1;
3431     const Register RendAddr = rscratch2;
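         // rscratch1/rscratch2 will hold the addresses of the heap's top and
         // end words for the (elided) retry loop that CAS-bumps the shared top.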


3693 // Note: monitorenter & exit are symmetric routines, which is reflected
3694 //       in the assembly code structure as well
3695 //
3696 // Stack layout:
3697 //
3698 // [expressions  ] <--- rsp               = expression stack top
3699 // ..
3700 // [expressions  ]
3701 // [monitor entry] <--- monitor block top = expression stack bot
3702 // ..
3703 // [monitor entry]
3704 // [frame data   ] <--- monitor block bot
3705 // ...
3706 // [saved rbp    ] <--- rbp
3707 void TemplateTable::monitorenter() {
3708   transition(atos, vtos);
3709 
3710   // check for NULL object
3711   __ null_check(rax);
3712 
3713   // We need to preemptively evacuate the object, because we later compare
3714   // it to objects in the BasicObjectLock list, and we might get false negatives
3715   // if another thread evacuates the object in the meantime. See acmp.
3716   oopDesc::bs()->interpreter_write_barrier(_masm, rax);
3717 
3718   const Address monitor_block_top(
3719         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3720   const Address monitor_block_bot(
3721         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3722   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
3723 
3724   Label allocated;
3725 
3726   // initialize entry pointer
3727   __ xorl(c_rarg1, c_rarg1); // points to free slot or NULL
3728 
3729   // find a free slot in the monitor block (result in c_rarg1)
3730   {
3731     Label entry, loop, exit;
3732     __ movptr(c_rarg3, monitor_block_top); // points to current entry,
3733                                            // starting with top-most entry
3734     __ lea(c_rarg2, monitor_block_bot);    // points to word before bottom
3735                                            // of monitor block
3736     __ jmpb_if_possible(entry);
3737 
3738     __ bind(loop);
3739     // check if current entry is used
3740     __ cmpptr(Address(c_rarg3, BasicObjectLock::obj_offset_in_bytes()), (int32_t) NULL_WORD);
3741     // if not used then remember entry in c_rarg1
3742     __ cmov(Assembler::equal, c_rarg1, c_rarg3);
3743     // check if current entry is for same object
3744     __ cmpptr(rax, Address(c_rarg3, BasicObjectLock::obj_offset_in_bytes()));
3745     // if same object then stop searching
3746     __ jccb(Assembler::equal, exit);
3747     // otherwise advance to next entry
3748     __ addptr(c_rarg3, entry_size);
3749     __ bind(entry);
3750     // check if bottom reached
3751     __ cmpptr(c_rarg3, c_rarg2);
3752     // if not at bottom then check this entry
3753     __ jcc(Assembler::notEqual, loop);
3754     __ bind(exit);
3755   }
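       // c_rarg1 now points to a free monitor slot, or is NULL if the
       // monitor block is full and a new slot must be allocated.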
3756 


3792   // store object
3793   __ movptr(Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()), rax);
3794   __ lock_object(c_rarg1);
3795 
3796   // check to make sure this monitor doesn't cause stack overflow after locking
3797   __ save_bcp();  // in case of exception
3798   __ generate_stack_overflow_check(0);
3799 
3800   // The bcp has already been incremented. Just need to dispatch to
3801   // next instruction.
3802   __ dispatch_next(vtos);
3803 }
3804 
3805 
3806 void TemplateTable::monitorexit() {
3807   transition(atos, vtos);
3808 
3809   // check for NULL object
3810   __ null_check(rax);
3811 
3812   // We need to preemptively evacuate the object, because we later compare
3813   // it to objects in the BasicObjectLock list, and we might get false negatives
3814   // if another thread evacuates the object in the meantime. See acmp.
3815   oopDesc::bs()->interpreter_write_barrier(_masm, rax);
3816 
3817   const Address monitor_block_top(
3818         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3819   const Address monitor_block_bot(
3820         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3821   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
3822 
3823   Label found;
3824 
3825   // find matching slot
3826   {
3827     Label entry, loop;
3828     __ movptr(c_rarg1, monitor_block_top); // points to current entry,
3829                                            // starting with top-most entry
3830     __ lea(c_rarg2, monitor_block_bot);    // points to word before bottom
3831                                            // of monitor block
3832     __ jmpb_if_possible(entry);
3833 
3834     __ bind(loop);
3835     // check if current entry is for same object
3836     __ cmpptr(rax, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()));
3837     // if same object then stop searching
3838     __ jcc(Assembler::equal, found);
3839     // otherwise advance to next entry
3840     __ addptr(c_rarg1, entry_size);
3841     __ bind(entry);
3842     // check if bottom reached
3843     __ cmpptr(c_rarg1, c_rarg2);
3844     // if not at bottom then check this entry
3845     __ jcc(Assembler::notEqual, loop);
3846   }
3847 
3848   // Error handling: unlocking was not block-structured; no matching
3849   // monitor entry was found, so throw IllegalMonitorStateException.
3849   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
3850                    InterpreterRuntime::throw_illegal_monitor_state_exception));
3851   __ should_not_reach_here();
3852 

