< prev index next >

src/hotspot/cpu/x86/macroAssembler_x86.cpp

Print this page

 5631   BLOCK_COMMENT("encode_klass_not_null {");
 5632   assert_different_registers(r, tmp);
 5633   if (CompressedKlassPointers::base() != nullptr) {
 5634     if (AOTCodeCache::is_on_for_dump()) {
 5635       movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
 5636     } else {
 5637       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5638     }
 5639     subq(r, tmp);
 5640   }
 5641   if (CompressedKlassPointers::shift() != 0) {
 5642     shrq(r, CompressedKlassPointers::shift());
 5643   }
 5644   BLOCK_COMMENT("} encode_klass_not_null");
 5645 }
 5646 
 5647 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
 5648   BLOCK_COMMENT("encode_and_move_klass_not_null {");
 5649   assert_different_registers(src, dst);
 5650   if (CompressedKlassPointers::base() != nullptr) {
 5651     movptr(dst, -(intptr_t)CompressedKlassPointers::base());





 5652     addq(dst, src);
 5653   } else {
 5654     movptr(dst, src);
 5655   }
 5656   if (CompressedKlassPointers::shift() != 0) {
 5657     shrq(dst, CompressedKlassPointers::shift());
 5658   }
 5659   BLOCK_COMMENT("} encode_and_move_klass_not_null");
 5660 }
 5661 
 5662 void  MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
 5663   BLOCK_COMMENT("decode_klass_not_null {");
 5664   assert_different_registers(r, tmp);
 5665   // Note: it will change flags
 5666   assert(UseCompressedClassPointers, "should only be used for compressed headers");
 5667   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5668   // vtableStubs also counts instructions in pd_code_size_limit.
 5669   // Also do not verify_oop as this is called by verify_oop.
 5670   if (CompressedKlassPointers::shift() != 0) {
 5671     shlq(r, CompressedKlassPointers::shift());

 5677       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5678     }
 5679     addq(r, tmp);
 5680   }
 5681   BLOCK_COMMENT("} decode_klass_not_null");
 5682 }
 5683 
 5684 void  MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
 5685   BLOCK_COMMENT("decode_and_move_klass_not_null {");
 5686   assert_different_registers(src, dst);
 5687   // Note: it will change flags
 5688   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5689   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5690   // vtableStubs also counts instructions in pd_code_size_limit.
 5691   // Also do not verify_oop as this is called by verify_oop.
 5692 
 5693   if (CompressedKlassPointers::base() == nullptr &&
 5694       CompressedKlassPointers::shift() == 0) {
 5695     // The best case scenario is that there is no base or shift. Then it is already
 5696     // a pointer that needs nothing but a register rename.
 5697     movl(dst, src);
 5698   } else {
 5699     if (CompressedKlassPointers::shift() <= Address::times_8) {
 5700       if (CompressedKlassPointers::base() != nullptr) {
 5701         movptr(dst, (intptr_t)CompressedKlassPointers::base());




 5702       } else {
 5703         xorq(dst, dst);
 5704       }
 5705       if (CompressedKlassPointers::shift() != 0) {
 5706         assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
 5707         leaq(dst, Address(dst, src, Address::times_8, 0));
 5708       } else {
 5709         addq(dst, src);
 5710       }
 5711     } else {
 5712       if (CompressedKlassPointers::base() != nullptr) {
 5713         const intptr_t base_right_shifted =
 5714             (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
 5715         movptr(dst, base_right_shifted);





 5716       } else {
 5717         xorq(dst, dst);
 5718       }
 5719       addq(dst, src);
 5720       shlq(dst, CompressedKlassPointers::shift());
 5721     }
 5722   }
 5723   BLOCK_COMMENT("} decode_and_move_klass_not_null");
 5724 }
 5725 
 // Load the narrow (compressed) encoding of 'obj' into dst as an immediate,
 // recording an oop relocation so the embedded value can be patched (e.g. by GC).
 5726 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
 5727   assert (UseCompressedOops, "should only be used for compressed headers");
 5728   assert (Universe::heap() != nullptr, "java heap should be initialized");
 5729   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 // The recorded index (not the oop itself) is embedded; the relocation ties it
 // back to the OopRecorder entry.
 5730   int oop_index = oop_recorder()->find_index(obj);
 5731   RelocationHolder rspec = oop_Relocation::spec(oop_index);
 5732   mov_narrow_oop(dst, oop_index, rspec);
 5733 }
 5734 
 5735 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {

 5776 }
 5777 
 // Compare register dst against the narrow (compressed) encoding of klass 'k',
 // recording a metadata relocation so the immediate can be patched.
 5778 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
 5779   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5780   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5781   int klass_index = oop_recorder()->find_index(k);
 5782   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5783   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
 5784 }
 5785 
 // Memory-operand variant: compare the narrow klass stored at address dst
 // against the narrow encoding of 'k', with a metadata relocation recorded.
 5786 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
 5787   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5788   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5789   int klass_index = oop_recorder()->find_index(k);
 5790   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5791   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
 5792 }
 5793 
 5794 void MacroAssembler::reinit_heapbase() {
 5795   if (UseCompressedOops) {
 5796     if (Universe::heap() != nullptr) {
 5797       if (CompressedOops::base() == nullptr) {
 5798         MacroAssembler::xorptr(r12_heapbase, r12_heapbase);


 5799       } else {
 5800         mov64(r12_heapbase, (int64_t)CompressedOops::base());
 5801       }
 5802     } else {
 5803       movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr()));
 5804     }
 5805   }
 5806 }
 5807 
 5808 #if COMPILER2_OR_JVMCI
 5809 
 5810 // clear memory of size 'cnt' qwords, starting at 'base' using XMM/YMM/ZMM registers
 5811 void MacroAssembler::xmm_clear_mem(Register base, Register cnt, Register rtmp, XMMRegister xtmp, KRegister mask) {
 5812   // cnt - number of qwords (8-byte words).
 5813   // base - start address, qword aligned.
 5814   Label L_zero_64_bytes, L_loop, L_sloop, L_tail, L_end;
 5815   bool use64byteVector = (MaxVectorSize == 64) && (VM_Version::avx3_threshold() == 0);
 5816   if (use64byteVector) {
 5817     vpxor(xtmp, xtmp, xtmp, AVX_512bit);
 5818   } else if (MaxVectorSize >= 32) {

 5631   BLOCK_COMMENT("encode_klass_not_null {");
 5632   assert_different_registers(r, tmp);
 5633   if (CompressedKlassPointers::base() != nullptr) {
 5634     if (AOTCodeCache::is_on_for_dump()) {
 5635       movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
 5636     } else {
 5637       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5638     }
 5639     subq(r, tmp);
 5640   }
 5641   if (CompressedKlassPointers::shift() != 0) {
 5642     shrq(r, CompressedKlassPointers::shift());
 5643   }
 5644   BLOCK_COMMENT("} encode_klass_not_null");
 5645 }
 5646 
 // Encode the Klass* in src into its narrow form in dst (dst = (src - base) >> shift);
 // src is preserved.
 5647 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
 5648   BLOCK_COMMENT("encode_and_move_klass_not_null {");
 5649   assert_different_registers(src, dst);
 5650   if (CompressedKlassPointers::base() != nullptr) {
 5651     if (AOTCodeCache::is_on_for_dump()) {
 // When dumping the AOT code cache, load the base indirectly via a relocatable
 // ExternalAddress (it may differ at AOT load time); negate so the subtraction
 // can be done with the addq below.
 5652       movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
 5653       negq(dst);
 5654     } else {
 5655       movptr(dst, -(intptr_t)CompressedKlassPointers::base());
 5656     }
 5657     addq(dst, src);
 5658   } else {
 5659     movptr(dst, src);
 5660   }
 5661   if (CompressedKlassPointers::shift() != 0) {
 5662     shrq(dst, CompressedKlassPointers::shift());
 5663   }
 5664   BLOCK_COMMENT("} encode_and_move_klass_not_null");
 5665 }
 5666 
 5667 void  MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
 5668   BLOCK_COMMENT("decode_klass_not_null {");
 5669   assert_different_registers(r, tmp);
 5670   // Note: it will change flags
 5671   assert(UseCompressedClassPointers, "should only be used for compressed headers");
 5672   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5673   // vtableStubs also counts instructions in pd_code_size_limit.
 5674   // Also do not verify_oop as this is called by verify_oop.
 5675   if (CompressedKlassPointers::shift() != 0) {
 5676     shlq(r, CompressedKlassPointers::shift());

 5682       movptr(tmp, (intptr_t)CompressedKlassPointers::base());
 5683     }
 5684     addq(r, tmp);
 5685   }
 5686   BLOCK_COMMENT("} decode_klass_not_null");
 5687 }
 5688 
 // Decode the narrow klass in src into a full Klass* in dst
 // (dst = (src << shift) + base); src is preserved.
 5689 void  MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
 5690   BLOCK_COMMENT("decode_and_move_klass_not_null {");
 5691   assert_different_registers(src, dst);
 5692   // Note: it will change flags
 5693   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5694   // Cannot assert, unverified entry point counts instructions (see .ad file)
 5695   // vtableStubs also counts instructions in pd_code_size_limit.
 5696   // Also do not verify_oop as this is called by verify_oop.
 5697 
 5698   if (CompressedKlassPointers::base() == nullptr &&
 5699       CompressedKlassPointers::shift() == 0) {
 5700     // The best case scenario is that there is no base or shift. Then it is already
 5701     // a pointer that needs nothing but a register rename.
 5702     movptr(dst, src);
 5703   } else {
 // Shift small enough to be an x86 scale factor: materialize base in dst,
 // then fold the shift+add into a single leaq.
 5704     if (CompressedKlassPointers::shift() <= Address::times_8) {
 5705       if (CompressedKlassPointers::base() != nullptr) {
 // AOT dump: base may differ at load time, load it via relocatable ExternalAddress.
 5706         if (AOTCodeCache::is_on_for_dump()) {
 5707           movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
 5708         } else {
 5709           movptr(dst, (intptr_t)CompressedKlassPointers::base());
 5710         }
 5711       } else {
 5712         xorq(dst, dst);
 5713       }
 5714       if (CompressedKlassPointers::shift() != 0) {
 5715         assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
 5716         leaq(dst, Address(dst, src, Address::times_8, 0));
 5717       } else {
 5718         addq(dst, src);
 5719       }
 // Shift too large for an addressing-mode scale: add (base >> shift) to the
 // narrow value, then shift the sum left.
 5720     } else {
 5721       if (CompressedKlassPointers::base() != nullptr) {
 5722         if (AOTCodeCache::is_on_for_dump()) {
 // Base only known at load time, so the right-shift happens at runtime here.
 5723           movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
 5724           shrq(dst, CompressedKlassPointers::shift());
 5725         } else {
 5726           const intptr_t base_right_shifted =
 5727                (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
 5728           movptr(dst, base_right_shifted);
 5729         }
 5730       } else {
 5731         xorq(dst, dst);
 5732       }
 5733       addq(dst, src);
 5734       shlq(dst, CompressedKlassPointers::shift());
 5735     }
 5736   }
 5737   BLOCK_COMMENT("} decode_and_move_klass_not_null");
 5738 }
 5739 
 // Load the narrow (compressed) encoding of 'obj' into dst as an immediate,
 // recording an oop relocation so the embedded value can be patched (e.g. by GC).
 5740 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
 5741   assert (UseCompressedOops, "should only be used for compressed headers");
 5742   assert (Universe::heap() != nullptr, "java heap should be initialized");
 5743   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 // The recorded index (not the oop itself) is embedded; the relocation ties it
 // back to the OopRecorder entry.
 5744   int oop_index = oop_recorder()->find_index(obj);
 5745   RelocationHolder rspec = oop_Relocation::spec(oop_index);
 5746   mov_narrow_oop(dst, oop_index, rspec);
 5747 }
 5748 
 5749 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {

 5790 }
 5791 
 // Compare register dst against the narrow (compressed) encoding of klass 'k',
 // recording a metadata relocation so the immediate can be patched.
 5792 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
 5793   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5794   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5795   int klass_index = oop_recorder()->find_index(k);
 5796   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5797   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
 5798 }
 5799 
 // Memory-operand variant: compare the narrow klass stored at address dst
 // against the narrow encoding of 'k', with a metadata relocation recorded.
 5800 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
 5801   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 5802   assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
 5803   int klass_index = oop_recorder()->find_index(k);
 5804   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
 5805   Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
 5806 }
 5807 
 // Reload r12_heapbase with the compressed-oops base.
 5808 void MacroAssembler::reinit_heapbase() {
 5809   if (UseCompressedOops) {
 5810     if (Universe::heap() != nullptr) { // GC was initialized
 5811       if (CompressedOops::base() == nullptr) {
 // Zero base: encoding is a pure shift, keep r12 = 0.
 5812         MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
 5813       } else if (AOTCodeCache::is_on_for_dump()) {
 // AOT dump: the base may differ when the AOT code is later loaded, so use a
 // relocatable indirect load rather than baking in a 64-bit immediate.
 5814         movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr()));
 5815       } else {
 5816         mov64(r12_heapbase, (int64_t)CompressedOops::base());
 5817       }
 5818     } else {
 // Heap (and hence the base value) not known yet — read it through its address.
 5819       movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr()));
 5820     }
 5821   }
 5822 }
 5823 
 5824 #if COMPILER2_OR_JVMCI
 5825 
 5826 // clear memory of size 'cnt' qwords, starting at 'base' using XMM/YMM/ZMM registers
 5827 void MacroAssembler::xmm_clear_mem(Register base, Register cnt, Register rtmp, XMMRegister xtmp, KRegister mask) {
 5828   // cnt - number of qwords (8-byte words).
 5829   // base - start address, qword aligned.
 5830   Label L_zero_64_bytes, L_loop, L_sloop, L_tail, L_end;
 5831   bool use64byteVector = (MaxVectorSize == 64) && (VM_Version::avx3_threshold() == 0);
 5832   if (use64byteVector) {
 5833     vpxor(xtmp, xtmp, xtmp, AVX_512bit);
 5834   } else if (MaxVectorSize >= 32) {
< prev index next >