< prev index next >

src/hotspot/cpu/x86/macroAssembler_x86.cpp

Print this page

 5609   BLOCK_COMMENT("encode_klass_not_null {");
 5610   assert_different_registers(r, tmp);
 5611   if (CompressedKlassPointers::base() != nullptr) {
 5612     if (AOTCodeCache::is_on_for_dump()) {
 5613       movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
 5614     } else {
 5615       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5616     }
 5617     subq(r, tmp);
 5618   }
 5619   if (CompressedKlassPointers::shift() != 0) {
 5620     shrq(r, CompressedKlassPointers::shift());
 5621   }
 5622   BLOCK_COMMENT("} encode_klass_not_null");
 5623 }
 5624 
// Compute the narrow-klass encoding of the Klass* in src into dst:
// dst = (src - base) >> shift. src is left untouched.
 5625 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
 5626   BLOCK_COMMENT("encode_and_move_klass_not_null {");
 5627   assert_different_registers(src, dst);
 5628   if (CompressedKlassPointers::base() != nullptr) {
 5629     movptr(dst, -(intptr_t)CompressedKlassPointers::base()); // dst = -base; addq below yields src - base




 5630     addq(dst, src);
 5631   } else {
 5632     movptr(dst, src); // no base: just move the pointer
 5633   }
 5634   if (CompressedKlassPointers::shift() != 0) {
 5635     shrq(dst, CompressedKlassPointers::shift()); // drop the alignment bits of the offset
 5636   }
 5637   BLOCK_COMMENT("} encode_and_move_klass_not_null");
 5638 }
 5639 
 5640 void  MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
 5641   BLOCK_COMMENT("decode_klass_not_null {");
 5642   assert_different_registers(r, tmp);
 5643   // Note: it will change flags
 5644   assert(UseCompressedClassPointers, "should only be used for compressed headers");
 5645   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5646   // vtableStubs also counts instructions in pd_code_size_limit.
 5647   // Also do not verify_oop as this is called by verify_oop.
 5648   if (CompressedKlassPointers::shift() != 0) {
 5649     shlq(r, CompressedKlassPointers::shift());

 5655       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5656     }
 5657     addq(r, tmp);
 5658   }
 5659   BLOCK_COMMENT("} decode_klass_not_null");
 5660 }
 5661 
// Decode the narrow klass in src into dst: dst = base + (src << shift).
// src is left untouched; dst must differ from src.
 5662 void  MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
 5663   BLOCK_COMMENT("decode_and_move_klass_not_null {");
 5664   assert_different_registers(src, dst);
 5665   // Note: it will change flags
 5666   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5667   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5668   // vtableStubs also counts instructions in pd_code_size_limit.
 5669   // Also do not verify_oop as this is called by verify_oop.
 5670 
 5671   if (CompressedKlassPointers::base() == nullptr &&
 5672       CompressedKlassPointers::shift() == 0) {
 5673     // The best case scenario is that there is no base or shift. Then it is already
 5674     // a pointer that needs nothing but a register rename.
 5675     movl(dst, src);
 5676   } else {
 5677     if (CompressedKlassPointers::shift() <= Address::times_8) { // shift fits lea's scale factor
 5678       if (CompressedKlassPointers::base() != nullptr) {
 5679         movptr(dst, (intptr_t)CompressedKlassPointers::base());




 5680       } else {
 5681         xorq(dst, dst); // base == 0
 5682       }
 5683       if (CompressedKlassPointers::shift() != 0) {
 5684         assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
 5685         leaq(dst, Address(dst, src, Address::times_8, 0)); // dst = base + src * 8 in one instruction
 5686       } else {
 5687         addq(dst, src);
 5688       }
 5689     } else {
 5690       // Shift too large for lea: pre-shift the base right so a single final
 5691       // left shift reconstructs base + (src << shift).
 5691       const intptr_t base_right_shifted =
 5692             (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
 5693       movptr(dst, base_right_shifted);




 5694       } else {
 5695         xorq(dst, dst);
 5696       }
 5697       addq(dst, src);                              // (base >> shift) + src
 5698       shlq(dst, CompressedKlassPointers::shift()); // << shift => base + (src << shift)
 5699     }
 5700   }
 5701   BLOCK_COMMENT("} decode_and_move_klass_not_null");
 5702 }
 5703 
// Load dst with the narrow encoding of obj, recording an oop relocation
// for the constant via the assembler's OopRecorder.
 5704 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
 5705   assert (UseCompressedOops, "should only be used for compressed headers");
 5706   assert (Universe::heap() != nullptr, "java heap should be initialized");
 5707   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5708   int oop_index = oop_recorder()->find_index(obj);
 5709   RelocationHolder rspec = oop_Relocation::spec(oop_index);
 5710   mov_narrow_oop(dst, oop_index, rspec); // immediate is the recorder index — presumably patched via rspec; confirm
 5711 }
 5712 
 5713 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {

 5754 }
 5755 
// Compare register dst against the narrow-klass encoding of k,
// recording a metadata relocation for the immediate.
 5756 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
 5757   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5758   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5759   int klass_index = oop_recorder()->find_index(k);
 5760   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5761   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
 5762 }
 5763 
// Memory-operand variant: compare the narrow klass at address dst against
// the narrow-klass encoding of k, recording a metadata relocation.
 5764 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
 5765   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5766   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5767   int klass_index = oop_recorder()->find_index(k);
 5768   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5769   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
 5770 }
 5771 
// Reload r12_heapbase with the compressed-oops base.
 5772 void MacroAssembler::reinit_heapbase() {
 5773   if (UseCompressedOops) {
 5774     if (Universe::heap() != nullptr) {
 5775       if (CompressedOops::base() == nullptr) {
 5776         MacroAssembler::xorptr(r12_heapbase, r12_heapbase); // zero base: r12 = 0


 5777       } else {
 5778         mov64(r12_heapbase, (int64_t)CompressedOops::base()); // base known: load as immediate
 5779       }
 5780     } else {
 5781       movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr())); // heap not yet initialized: load base indirectly via its address cell
 5782     }
 5783   }
 5784 }
 5785 
 5786 #if COMPILER2_OR_JVMCI
 5787 
 5788 // clear memory of size 'cnt' qwords, starting at 'base' using XMM/YMM/ZMM registers
 5789 void MacroAssembler::xmm_clear_mem(Register base, Register cnt, Register rtmp, XMMRegister xtmp, KRegister mask) {
 5790   // cnt - number of qwords (8-byte words).
 5791   // base - start address, qword aligned.
 5792   Label L_zero_64_bytes, L_loop, L_sloop, L_tail, L_end;
 5793   bool use64byteVector = (MaxVectorSize == 64) && (VM_Version::avx3_threshold() == 0);
 5794   if (use64byteVector) {
 5795     vpxor(xtmp, xtmp, xtmp, AVX_512bit);
 5796   } else if (MaxVectorSize >= 32) {

 9981 // Restores the legacy GPRs' state from the stack and pops the 16-slot save area.
 9982 void MacroAssembler::restore_legacy_gprs() {
 9983   movq(r15, Address(rsp, 0));
 9984   movq(r14, Address(rsp, wordSize));
 9985   movq(r13, Address(rsp, 2 * wordSize));
 9986   movq(r12, Address(rsp, 3 * wordSize));
 9987   movq(r11, Address(rsp, 4 * wordSize));
 9988   movq(r10, Address(rsp, 5 * wordSize));
 9989   movq(r9,  Address(rsp, 6 * wordSize));
 9990   movq(r8,  Address(rsp, 7 * wordSize));
 9991   movq(rdi, Address(rsp, 8 * wordSize));
 9992   movq(rsi, Address(rsp, 9 * wordSize));
 9993   movq(rbp, Address(rsp, 10 * wordSize));
       // the 11 * wordSize slot is skipped — presumably rsp's save slot; confirm against the matching save routine
 9994   movq(rbx, Address(rsp, 12 * wordSize));
 9995   movq(rdx, Address(rsp, 13 * wordSize));
 9996   movq(rcx, Address(rsp, 14 * wordSize));
 9997   movq(rax, Address(rsp, 15 * wordSize));
 9998   addq(rsp, 16 * wordSize); // pop all 16 slots
 9999 }
10000 














// Set dst to 1 if 'comparison' holds, else 0, zero-extended to 32 bits.
10001 void MacroAssembler::setcc(Assembler::Condition comparison, Register dst) {
10002   if (VM_Version::supports_apx_f()) {
10003     esetzucc(comparison, dst); // APX form: set + zero the upper bits in one instruction
10004   } else {
10005     setb(comparison, dst);   // sets only the low byte...
10006     movzbl(dst, dst);        // ...so zero-extend explicitly
10007   }
10008 }

 5609   BLOCK_COMMENT("encode_klass_not_null {");
 5610   assert_different_registers(r, tmp);
 5611   if (CompressedKlassPointers::base() != nullptr) {
 5612     if (AOTCodeCache::is_on_for_dump()) {
 5613       movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
 5614     } else {
 5615       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5616     }
 5617     subq(r, tmp);
 5618   }
 5619   if (CompressedKlassPointers::shift() != 0) {
 5620     shrq(r, CompressedKlassPointers::shift());
 5621   }
 5622   BLOCK_COMMENT("} encode_klass_not_null");
 5623 }
 5624 
// Compute the narrow-klass encoding of the Klass* in src into dst:
// dst = (src - base) >> shift. src is left untouched.
 5625 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
 5626   BLOCK_COMMENT("encode_and_move_klass_not_null {");
 5627   assert_different_registers(src, dst);
 5628   if (CompressedKlassPointers::base() != nullptr) {
 5629     if (AOTCodeCache::is_on_for_dump()) {
 5630       // AOT dump: materialize the base via its address cell rather than as an
 5631       // immediate — presumably so the cached code stays relocatable; confirm.
 5630       movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
 5631       negq(dst); // dst = -base
 5632     } else {
 5633       movptr(dst, -(intptr_t)CompressedKlassPointers::base()); // dst = -base as immediate
 5634     }
 5635     addq(dst, src); // src - base
 5636   } else {
 5637     movptr(dst, src); // no base: just move the pointer
 5638   }
 5639   if (CompressedKlassPointers::shift() != 0) {
 5640     shrq(dst, CompressedKlassPointers::shift()); // drop the alignment bits of the offset
 5641   }
 5642   BLOCK_COMMENT("} encode_and_move_klass_not_null");
 5643 }
 5644 
 5645 void  MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
 5646   BLOCK_COMMENT("decode_klass_not_null {");
 5647   assert_different_registers(r, tmp);
 5648   // Note: it will change flags
 5649   assert(UseCompressedClassPointers, "should only be used for compressed headers");
 5650   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5651   // vtableStubs also counts instructions in pd_code_size_limit.
 5652   // Also do not verify_oop as this is called by verify_oop.
 5653   if (CompressedKlassPointers::shift() != 0) {
 5654     shlq(r, CompressedKlassPointers::shift());

 5660       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5661     }
 5662     addq(r, tmp);
 5663   }
 5664   BLOCK_COMMENT("} decode_klass_not_null");
 5665 }
 5666 
// Decode the narrow klass in src into dst: dst = base + (src << shift).
// src is left untouched; dst must differ from src.
 5667 void  MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
 5668   BLOCK_COMMENT("decode_and_move_klass_not_null {");
 5669   assert_different_registers(src, dst);
 5670   // Note: it will change flags
 5671   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5672   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5673   // vtableStubs also counts instructions in pd_code_size_limit.
 5674   // Also do not verify_oop as this is called by verify_oop.
 5675 
 5676   if (CompressedKlassPointers::base() == nullptr &&
 5677       CompressedKlassPointers::shift() == 0) {
 5678     // The best case scenario is that there is no base or shift. Then it is already
 5679     // a pointer that needs nothing but a register rename.
 5680     movptr(dst, src);
 5681   } else {
 5682     if (CompressedKlassPointers::shift() <= Address::times_8) { // shift fits lea's scale factor
 5683       if (CompressedKlassPointers::base() != nullptr) {
 5684         if (AOTCodeCache::is_on_for_dump()) {
 5685           movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr())); // AOT dump: load base via its address cell
 5686         } else {
 5687           movptr(dst, (intptr_t)CompressedKlassPointers::base()); // base as immediate
 5688         }
 5689       } else {
 5690         xorq(dst, dst); // base == 0
 5691       }
 5692       if (CompressedKlassPointers::shift() != 0) {
 5693         assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
 5694         leaq(dst, Address(dst, src, Address::times_8, 0)); // dst = base + src * 8 in one instruction
 5695       } else {
 5696         addq(dst, src);
 5697       }
 5698     } else {
 5699       // Shift too large for lea: use (base >> shift) + src, then one final left
 5700       // shift reconstructs base + (src << shift).
 5699       if (CompressedKlassPointers::base() != nullptr) {
 5700         if (AOTCodeCache::is_on_for_dump()) {
 5701           movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
 5702           shrq(dst, CompressedKlassPointers::shift()); // pre-shift the runtime-loaded base
 5703         } else {
 5704           const intptr_t base_right_shifted =
 5705                (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
 5706           movptr(dst, base_right_shifted); // pre-shifted at assembly time
 5707         }
 5708       } else {
 5709         xorq(dst, dst);
 5710       }
 5711       addq(dst, src);                              // (base >> shift) + src
 5712       shlq(dst, CompressedKlassPointers::shift()); // << shift => base + (src << shift)
 5713     }
 5714   }
 5715   BLOCK_COMMENT("} decode_and_move_klass_not_null");
 5716 }
 5717 
// Load dst with the narrow encoding of obj, recording an oop relocation
// for the constant via the assembler's OopRecorder.
 5718 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
 5719   assert (UseCompressedOops, "should only be used for compressed headers");
 5720   assert (Universe::heap() != nullptr, "java heap should be initialized");
 5721   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5722   int oop_index = oop_recorder()->find_index(obj);
 5723   RelocationHolder rspec = oop_Relocation::spec(oop_index);
 5724   mov_narrow_oop(dst, oop_index, rspec); // immediate is the recorder index — presumably patched via rspec; confirm
 5725 }
 5726 
 5727 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {

 5768 }
 5769 
// Compare register dst against the narrow-klass encoding of k,
// recording a metadata relocation for the immediate.
 5770 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
 5771   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5772   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5773   int klass_index = oop_recorder()->find_index(k);
 5774   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5775   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
 5776 }
 5777 
// Memory-operand variant: compare the narrow klass at address dst against
// the narrow-klass encoding of k, recording a metadata relocation.
 5778 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
 5779   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5780   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5781   int klass_index = oop_recorder()->find_index(k);
 5782   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5783   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
 5784 }
 5785 
// Reload r12_heapbase with the compressed-oops base.
 5786 void MacroAssembler::reinit_heapbase() {
 5787   if (UseCompressedOops) {
 5788     if (Universe::heap() != nullptr) { // GC was initialized
 5789       if (CompressedOops::base() == nullptr) {
 5790         MacroAssembler::xorptr(r12_heapbase, r12_heapbase); // zero base: r12 = 0
 5791       } else if (AOTCodeCache::is_on_for_dump()) {
 5792         movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr())); // AOT dump: load base via its address cell
 5793       } else {
 5794         mov64(r12_heapbase, (int64_t)CompressedOops::base()); // base known: load as immediate
 5795       }
 5796     } else {
 5797       movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr())); // heap not yet initialized: load indirectly
 5798     }
 5799   }
 5800 }
 5801 
 5802 #if COMPILER2_OR_JVMCI
 5803 
 5804 // clear memory of size 'cnt' qwords, starting at 'base' using XMM/YMM/ZMM registers
 5805 void MacroAssembler::xmm_clear_mem(Register base, Register cnt, Register rtmp, XMMRegister xtmp, KRegister mask) {
 5806   // cnt - number of qwords (8-byte words).
 5807   // base - start address, qword aligned.
 5808   Label L_zero_64_bytes, L_loop, L_sloop, L_tail, L_end;
 5809   bool use64byteVector = (MaxVectorSize == 64) && (VM_Version::avx3_threshold() == 0);
 5810   if (use64byteVector) {
 5811     vpxor(xtmp, xtmp, xtmp, AVX_512bit);
 5812   } else if (MaxVectorSize >= 32) {

 9997 // Restores the legacy GPRs' state from the stack and pops the 16-slot save area.
 9998 void MacroAssembler::restore_legacy_gprs() {
 9999   movq(r15, Address(rsp, 0));
10000   movq(r14, Address(rsp, wordSize));
10001   movq(r13, Address(rsp, 2 * wordSize));
10002   movq(r12, Address(rsp, 3 * wordSize));
10003   movq(r11, Address(rsp, 4 * wordSize));
10004   movq(r10, Address(rsp, 5 * wordSize));
10005   movq(r9,  Address(rsp, 6 * wordSize));
10006   movq(r8,  Address(rsp, 7 * wordSize));
10007   movq(rdi, Address(rsp, 8 * wordSize));
10008   movq(rsi, Address(rsp, 9 * wordSize));
10009   movq(rbp, Address(rsp, 10 * wordSize));
      // the 11 * wordSize slot is skipped — presumably rsp's save slot; confirm against the matching save routine
10010   movq(rbx, Address(rsp, 12 * wordSize));
10011   movq(rdx, Address(rsp, 13 * wordSize));
10012   movq(rcx, Address(rsp, 14 * wordSize));
10013   movq(rax, Address(rsp, 15 * wordSize));
10014   addq(rsp, 16 * wordSize); // pop all 16 slots
10015 }
10016 
// Load the address of an AOT runtime-constants (aotrc) field into reg.
// 'a' must lie inside the AOTRuntimeConstants data area (asserted below).
10017 void MacroAssembler::load_aotrc_address(Register reg, address a) {
10018 #if INCLUDE_CDS
10019   assert(AOTRuntimeConstants::contains(a), "address out of range for data area");
10020   if (AOTCodeCache::is_on_for_dump()) {
10021     // all aotrc field addresses should be registered in the AOTCodeCache address table
10022     lea(reg, ExternalAddress(a)); // relocatable reference for the cached code
10023   } else {
10024     mov64(reg, (uint64_t)a); // plain 64-bit immediate when not dumping
10025   }
10026 #else
10027   ShouldNotReachHere(); // aotrc only exists with CDS support built in
10028 #endif
10029 }
10030 
// Set dst to 1 if 'comparison' holds, else 0, zero-extended to 32 bits.
10031 void MacroAssembler::setcc(Assembler::Condition comparison, Register dst) {
10032   if (VM_Version::supports_apx_f()) {
10033     esetzucc(comparison, dst); // APX form: set + zero the upper bits in one instruction
10034   } else {
10035     setb(comparison, dst);   // sets only the low byte...
10036     movzbl(dst, dst);        // ...so zero-extend explicitly
10037   }
10038 }
< prev index next >