< prev index next >

src/hotspot/cpu/x86/macroAssembler_x86.cpp

Print this page

 5639   BLOCK_COMMENT("encode_klass_not_null {");
 5640   assert_different_registers(r, tmp);
 5641   if (CompressedKlassPointers::base() != nullptr) {
 5642     if (AOTCodeCache::is_on_for_dump()) {
 5643       movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
 5644     } else {
 5645       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5646     }
 5647     subq(r, tmp);
 5648   }
 5649   if (CompressedKlassPointers::shift() != 0) {
 5650     shrq(r, CompressedKlassPointers::shift());
 5651   }
 5652   BLOCK_COMMENT("} encode_klass_not_null");
 5653 }
 5654 
// Compress a non-null Klass* from src into dst: dst = (src - base) >> shift.
// Unlike the in-place encode_klass_not_null(Register, Register) above, this
// variant leaves src untouched and needs no temp register: it materializes the
// negated base in dst and adds src, turning the subtraction into an addition.
 5655 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
 5656   BLOCK_COMMENT("encode_and_move_klass_not_null {");
 5657   assert_different_registers(src, dst);
 5658   if (CompressedKlassPointers::base() != nullptr) {
 5659     movptr(dst, -(intptr_t)CompressedKlassPointers::base()); // dst = -base




 5660     addq(dst, src);   // dst = src - base
 5661   } else {
 5662     movptr(dst, src); // no base: plain register move
 5663   }
 5664   if (CompressedKlassPointers::shift() != 0) {
 5665     shrq(dst, CompressedKlassPointers::shift()); // apply the encoding shift
 5666   }
 5667   BLOCK_COMMENT("} encode_and_move_klass_not_null");
 5668 }
 5669 
 5670 void  MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
 5671   BLOCK_COMMENT("decode_klass_not_null {");
 5672   assert_different_registers(r, tmp);
 5673   // Note: it will change flags
 5674   assert(UseCompressedClassPointers, "should only be used for compressed headers");
 5675   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5676   // vtableStubs also counts instructions in pd_code_size_limit.
 5677   // Also do not verify_oop as this is called by verify_oop.
 5678   if (CompressedKlassPointers::shift() != 0) {
 5679     shlq(r, CompressedKlassPointers::shift());

 5685       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5686     }
 5687     addq(r, tmp);
 5688   }
 5689   BLOCK_COMMENT("} decode_klass_not_null");
 5690 }
 5691 
// Decompress the narrow klass in src into dst: dst = (src << shift) + base.
// src is left unchanged. The three strategies below trade instruction count
// against the available shift: no base/shift -> move; small shift -> one LEA
// (base in dst, src scaled by the shift); large shift -> add the pre-right-
// shifted base and shift the sum left.
 5692 void  MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
 5693   BLOCK_COMMENT("decode_and_move_klass_not_null {");
 5694   assert_different_registers(src, dst);
 5695   // Note: it will change flags
 5696   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5697   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5698   // vtableStubs also counts instructions in pd_code_size_limit.
 5699   // Also do not verify_oop as this is called by verify_oop.
 5700 
 5701   if (CompressedKlassPointers::base() == nullptr &&
 5702       CompressedKlassPointers::shift() == 0) {
 5703     // The best case scenario is that there is no base or shift. Then it is already
 5704     // a pointer that needs nothing but a register rename.
 5705     movl(dst, src); // NOTE(review): 32-bit move; newer revisions use movptr here
 5706   } else {
 5707     if (CompressedKlassPointers::shift() <= Address::times_8) {
 5708       if (CompressedKlassPointers::base() != nullptr) {
 5709         movptr(dst, (intptr_t)CompressedKlassPointers::base()); // dst = base




 5710       } else {
 5711         xorq(dst, dst); // dst = 0 (no base)
 5712       }
 5713       if (CompressedKlassPointers::shift() != 0) {
 5714         assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
 5715         leaq(dst, Address(dst, src, Address::times_8, 0)); // dst = base + (src << 3) in one LEA
 5716       } else {
 5717         addq(dst, src); // dst = base + src
 5718       }
 5719     } else {
 5720       if (CompressedKlassPointers::base() != nullptr) {
 5721         const intptr_t base_right_shifted =
 5722             (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
 5723         // Shift exceeds LEA's max scale: pre-shift the base right so a single
 5724         // final shlq restores both base and src contributions.
 5723         movptr(dst, base_right_shifted);





 5724       } else {
 5725         xorq(dst, dst);
 5726       }
 5727       addq(dst, src);                              // dst = (base >> shift) + src
 5728       shlq(dst, CompressedKlassPointers::shift()); // dst = base + (src << shift)
 5729     }
 5730   }
 5731   BLOCK_COMMENT("} decode_and_move_klass_not_null");
 5732 }
 5733 
// Load the narrow (compressed) form of oop 'obj' into dst, recording an
// oop relocation so the GC can patch the embedded value if the object moves.
 5734 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
 5735   assert (UseCompressedOops, "should only be used for compressed headers");
 5736   assert (Universe::heap() != nullptr, "java heap should be initialized");
 5737   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5738   int oop_index = oop_recorder()->find_index(obj);          // index into the nmethod's oop table
 5739   RelocationHolder rspec = oop_Relocation::spec(oop_index); // relocation info for the patchable immediate
 5740   mov_narrow_oop(dst, oop_index, rspec);
 5741 }
 5742 
 5743 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {

 5784 }
 5785 
// Compare the narrow klass in register dst against the compressed encoding of
// 'k', embedding the constant with a metadata relocation so it stays patchable.
 5786 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
 5787   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5788   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5789   int klass_index = oop_recorder()->find_index(k);
 5790   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5791   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec); // cmp reg, imm32 (narrow klass)
 5792 }
 5793 
// Memory-operand variant: compare the narrow klass stored at 'dst' against the
// compressed encoding of 'k', with a metadata relocation on the immediate.
 5794 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
 5795   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5796   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5797   int klass_index = oop_recorder()->find_index(k);
 5798   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5799   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec); // cmp mem32, imm32 (narrow klass)
 5800 }
 5801 
// Reload r12_heapbase with the compressed-oops base. Before the heap exists
// the base is read indirectly through its address (value not yet final);
// afterwards a zero base is materialized with xor, otherwise as a 64-bit
// immediate.
 5802 void MacroAssembler::reinit_heapbase() {
 5803   if (UseCompressedOops) {
 5804     if (Universe::heap() != nullptr) {
 5805       if (CompressedOops::base() == nullptr) {
 5806         MacroAssembler::xorptr(r12_heapbase, r12_heapbase); // zero base: r12 = 0


 5807       } else {
 5808         mov64(r12_heapbase, (int64_t)CompressedOops::base()); // base known: load as immediate
 5809       }
 5810     } else {
 5811       movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr())); // heap not initialized yet: load indirectly
 5812     }
 5813   }
 5814 }
 5815 
 5816 #if COMPILER2_OR_JVMCI
 5817 
 5818 // clear memory of size 'cnt' qwords, starting at 'base' using XMM/YMM/ZMM registers
 5819 void MacroAssembler::xmm_clear_mem(Register base, Register cnt, Register rtmp, XMMRegister xtmp, KRegister mask) {
 5820   // cnt - number of qwords (8-byte words).
 5821   // base - start address, qword aligned.
 5822   Label L_zero_64_bytes, L_loop, L_sloop, L_tail, L_end;
 5823   bool use64byteVector = (MaxVectorSize == 64) && (CopyAVX3Threshold == 0);
 5824   if (use64byteVector) {
 5825     vpxor(xtmp, xtmp, xtmp, AVX_512bit);
 5826   } else if (MaxVectorSize >= 32) {

 5639   BLOCK_COMMENT("encode_klass_not_null {");
 5640   assert_different_registers(r, tmp);
 5641   if (CompressedKlassPointers::base() != nullptr) {
 5642     if (AOTCodeCache::is_on_for_dump()) {
 5643       movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
 5644     } else {
 5645       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5646     }
 5647     subq(r, tmp);
 5648   }
 5649   if (CompressedKlassPointers::shift() != 0) {
 5650     shrq(r, CompressedKlassPointers::shift());
 5651   }
 5652   BLOCK_COMMENT("} encode_klass_not_null");
 5653 }
 5654 
// Compress a non-null Klass* from src into dst: dst = (src - base) >> shift.
// Leaves src untouched and needs no temp register: the negated base is
// materialized in dst and src is added, turning the subtraction into an add.
// When dumping into the AOT code cache the base is loaded indirectly from
// base_addr() and negated — presumably so the emitted code has no baked-in
// base value and stays valid across runs (TODO confirm against AOTCodeCache).
 5655 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
 5656   BLOCK_COMMENT("encode_and_move_klass_not_null {");
 5657   assert_different_registers(src, dst);
 5658   if (CompressedKlassPointers::base() != nullptr) {
 5659     if (AOTCodeCache::is_on_for_dump()) {
 5660       movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr())); // load base indirectly (relocatable)
 5661       negq(dst);                                                          // dst = -base
 5662     } else {
 5663       movptr(dst, -(intptr_t)CompressedKlassPointers::base());            // dst = -base as immediate
 5664     }
 5665     addq(dst, src);   // dst = src - base
 5666   } else {
 5667     movptr(dst, src); // no base: plain register move
 5668   }
 5669   if (CompressedKlassPointers::shift() != 0) {
 5670     shrq(dst, CompressedKlassPointers::shift()); // apply the encoding shift
 5671   }
 5672   BLOCK_COMMENT("} encode_and_move_klass_not_null");
 5673 }
 5674 
 5675 void  MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
 5676   BLOCK_COMMENT("decode_klass_not_null {");
 5677   assert_different_registers(r, tmp);
 5678   // Note: it will change flags
 5679   assert(UseCompressedClassPointers, "should only be used for compressed headers");
 5680   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5681   // vtableStubs also counts instructions in pd_code_size_limit.
 5682   // Also do not verify_oop as this is called by verify_oop.
 5683   if (CompressedKlassPointers::shift() != 0) {
 5684     shlq(r, CompressedKlassPointers::shift());

 5690       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5691     }
 5692     addq(r, tmp);
 5693   }
 5694   BLOCK_COMMENT("} decode_klass_not_null");
 5695 }
 5696 
// Decompress the narrow klass in src into dst: dst = (src << shift) + base.
// src is left unchanged. Strategy depends on the shift: none/base-less ->
// move; shift representable as an LEA scale -> one LEA; larger shift ->
// add a pre-right-shifted base, then shift the sum left. In AOT-dump mode
// the base is loaded indirectly from base_addr() instead of as an immediate —
// presumably to keep the emitted code free of a baked-in base value
// (TODO confirm against AOTCodeCache).
 5697 void  MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
 5698   BLOCK_COMMENT("decode_and_move_klass_not_null {");
 5699   assert_different_registers(src, dst);
 5700   // Note: it will change flags
 5701   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5702   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5703   // vtableStubs also counts instructions in pd_code_size_limit.
 5704   // Also do not verify_oop as this is called by verify_oop.
 5705 
 5706   if (CompressedKlassPointers::base() == nullptr &&
 5707       CompressedKlassPointers::shift() == 0) {
 5708     // The best case scenario is that there is no base or shift. Then it is already
 5709     // a pointer that needs nothing but a register rename.
 5710     movptr(dst, src);
 5711   } else {
 5712     if (CompressedKlassPointers::shift() <= Address::times_8) {
 5713       if (CompressedKlassPointers::base() != nullptr) {
 5714         if (AOTCodeCache::is_on_for_dump()) {
 5715           movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr())); // load base indirectly (relocatable)
 5716         } else {
 5717           movptr(dst, (intptr_t)CompressedKlassPointers::base());             // dst = base as immediate
 5718         }
 5719       } else {
 5720         xorq(dst, dst); // dst = 0 (no base)
 5721       }
 5722       if (CompressedKlassPointers::shift() != 0) {
 5723         assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
 5724         leaq(dst, Address(dst, src, Address::times_8, 0)); // dst = base + (src << 3) in one LEA
 5725       } else {
 5726         addq(dst, src); // dst = base + src
 5727       }
 5728     } else {
 5729       // Shift exceeds LEA's max scale: start from base >> shift so one final
 5730       // shlq restores both the base and the src contribution.
 5729       if (CompressedKlassPointers::base() != nullptr) {
 5730         if (AOTCodeCache::is_on_for_dump()) {
 5731           movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr())); // load base indirectly...
 5732           shrq(dst, CompressedKlassPointers::shift());                        // ...then right-shift at runtime
 5733         } else {
 5734           const intptr_t base_right_shifted =
 5735                (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
 5736           movptr(dst, base_right_shifted); // pre-shifted at assembly time
 5737         }
 5738       } else {
 5739         xorq(dst, dst);
 5740       }
 5741       addq(dst, src);                              // dst = (base >> shift) + src
 5742       shlq(dst, CompressedKlassPointers::shift()); // dst = base + (src << shift)
 5743     }
 5744   }
 5745   BLOCK_COMMENT("} decode_and_move_klass_not_null");
 5746 }
 5747 
// Load the narrow (compressed) form of oop 'obj' into dst, recording an
// oop relocation so the GC can patch the embedded value if the object moves.
 5748 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
 5749   assert (UseCompressedOops, "should only be used for compressed headers");
 5750   assert (Universe::heap() != nullptr, "java heap should be initialized");
 5751   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5752   int oop_index = oop_recorder()->find_index(obj);          // index into the nmethod's oop table
 5753   RelocationHolder rspec = oop_Relocation::spec(oop_index); // relocation info for the patchable immediate
 5754   mov_narrow_oop(dst, oop_index, rspec);
 5755 }
 5756 
 5757 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {

 5798 }
 5799 
// Compare the narrow klass in register dst against the compressed encoding of
// 'k', embedding the constant with a metadata relocation so it stays patchable.
 5800 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
 5801   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5802   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5803   int klass_index = oop_recorder()->find_index(k);
 5804   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5805   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec); // cmp reg, imm32 (narrow klass)
 5806 }
 5807 
// Memory-operand variant: compare the narrow klass stored at 'dst' against the
// compressed encoding of 'k', with a metadata relocation on the immediate.
 5808 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
 5809   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5810   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5811   int klass_index = oop_recorder()->find_index(k);
 5812   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5813   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec); // cmp mem32, imm32 (narrow klass)
 5814 }
 5815 
// Reload r12_heapbase with the compressed-oops base. A zero base is
// materialized with xor; in AOT-dump mode (and before the heap exists) the
// base is read indirectly through base_addr() rather than baked in as an
// immediate — presumably so the cached code is not tied to one run's base
// value (TODO confirm against AOTCodeCache); otherwise it is a 64-bit
// immediate load.
 5816 void MacroAssembler::reinit_heapbase() {
 5817   if (UseCompressedOops) {
 5818     if (Universe::heap() != nullptr) { // GC was initialized
 5819       if (CompressedOops::base() == nullptr) {
 5820         MacroAssembler::xorptr(r12_heapbase, r12_heapbase);             // zero base: r12 = 0
 5821       } else if (AOTCodeCache::is_on_for_dump()) {
 5822         movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr())); // AOT dump: load indirectly
 5823       } else {
 5824         mov64(r12_heapbase, (int64_t)CompressedOops::base());           // base known: load as immediate
 5825       }
 5826     } else {
 5827       movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr())); // heap not initialized yet: load indirectly
 5828     }
 5829   }
 5830 }
 5831 
 5832 #if COMPILER2_OR_JVMCI
 5833 
 5834 // clear memory of size 'cnt' qwords, starting at 'base' using XMM/YMM/ZMM registers
 5835 void MacroAssembler::xmm_clear_mem(Register base, Register cnt, Register rtmp, XMMRegister xtmp, KRegister mask) {
 5836   // cnt - number of qwords (8-byte words).
 5837   // base - start address, qword aligned.
 5838   Label L_zero_64_bytes, L_loop, L_sloop, L_tail, L_end;
 5839   bool use64byteVector = (MaxVectorSize == 64) && (CopyAVX3Threshold == 0);
 5840   if (use64byteVector) {
 5841     vpxor(xtmp, xtmp, xtmp, AVX_512bit);
 5842   } else if (MaxVectorSize >= 32) {
< prev index next >