
src/hotspot/cpu/x86/macroAssembler_x86.cpp


  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "jvm.h"
  27 #include "asm/assembler.hpp"
  28 #include "asm/assembler.inline.hpp"
  29 #include "compiler/compiler_globals.hpp"
  30 #include "compiler/disassembler.hpp"
  31 #include "gc/shared/barrierSet.hpp"
  32 #include "gc/shared/barrierSetAssembler.hpp"
  33 #include "gc/shared/collectedHeap.inline.hpp"
  34 #include "gc/shared/tlab_globals.hpp"
  35 #include "interpreter/bytecodeHistogram.hpp"
  36 #include "interpreter/interpreter.hpp"

  37 #include "memory/resourceArea.hpp"
  38 #include "memory/universe.hpp"
  39 #include "oops/accessDecorators.hpp"

  40 #include "oops/compressedOops.inline.hpp"
  41 #include "oops/klass.inline.hpp"
  42 #include "prims/methodHandles.hpp"
  43 #include "runtime/continuation.hpp"
  44 #include "runtime/flags/flagSetting.hpp"
  45 #include "runtime/interfaceSupport.inline.hpp"
  46 #include "runtime/javaThread.hpp"
  47 #include "runtime/jniHandles.hpp"
  48 #include "runtime/objectMonitor.hpp"
  49 #include "runtime/os.hpp"
  50 #include "runtime/safepoint.hpp"
  51 #include "runtime/safepointMechanism.hpp"
  52 #include "runtime/sharedRuntime.hpp"
  53 #include "runtime/stubRoutines.hpp"
  54 #include "utilities/macros.hpp"
  55 #include "crc32c.h"
  56 
  57 #ifdef PRODUCT
  58 #define BLOCK_COMMENT(str) /* nothing */
  59 #define STOP(error) stop(error)

3906 
3907   int restore_offset;
3908   if (offset == -1) {
3909     restore_offset = restore_size - gp_reg_size;
3910   } else {
3911     restore_offset = offset + restore_size - gp_reg_size;
3912   }
3913   for (ReverseRegSetIterator<Register> it = set.rbegin(); *it != noreg; ++it) {
3914     movptr(*it, Address(rsp, restore_offset));
3915     restore_offset -= gp_reg_size;
3916   }
3917 
3918   if (offset == -1) {
3919     addptr(rsp, aligned_size);
3920   }
3921 }
3922 
3923 // Preserves the contents of address; destroys the contents of length_in_bytes and temp.
3924 void MacroAssembler::zero_memory(Register address, Register length_in_bytes, int offset_in_bytes, Register temp) {
3925   assert(address != length_in_bytes && address != temp && temp != length_in_bytes, "registers must be different");
3926   assert((offset_in_bytes & (BytesPerWord - 1)) == 0, "offset must be a multiple of BytesPerWord");
3927   Label done;
3928 
3929   testptr(length_in_bytes, length_in_bytes);
3930   jcc(Assembler::zero, done);
3931 
3932   // initialize topmost word, divide index by 2, check if odd and test if zero
3933   // note: for the remaining code to work, index must be a multiple of BytesPerWord
3934 #ifdef ASSERT
3935   {
3936     Label L;
3937     testptr(length_in_bytes, BytesPerWord - 1);
3938     jcc(Assembler::zero, L);
3939     stop("length must be a multiple of BytesPerWord");
3940     bind(L);
3941   }
3942 #endif
3943   Register index = length_in_bytes;
3944   xorptr(temp, temp);    // use _zero reg to clear memory (shorter code)
3945   if (UseIncDec) {
3946     shrptr(index, 3);  // divide by 8/16 and set carry flag if bit 2 was set
3947   } else {
3948     shrptr(index, 2);  // use 2 instructions to avoid partial flag stall
3949     shrptr(index, 1);
3950   }
3951 #ifndef _LP64
3952   // index might not have been a multiple of 8 (i.e., bit 2 was set)
3953   {
3954     Label even;
3955     // note: if index was a multiple of 8, then it cannot
3956     //       be 0 now otherwise it must have been 0 before
3957     //       => if it is even, we don't need to check for 0 again
3958     jcc(Assembler::carryClear, even);
3959     // clear topmost word (no jump would be needed if conditional assignment worked here)
3960     movptr(Address(address, index, Address::times_8, offset_in_bytes - 0*BytesPerWord), temp);
3961     // index could be 0 now, must check again
3962     jcc(Assembler::zero, done);
3963     bind(even);
3964   }

4858 
4859 void MacroAssembler::load_mirror(Register mirror, Register method, Register tmp) {
4860   // get mirror
4861   const int mirror_offset = in_bytes(Klass::java_mirror_offset());
4862   load_method_holder(mirror, method);
4863   movptr(mirror, Address(mirror, mirror_offset));
4864   resolve_oop_handle(mirror, tmp);
4865 }
4866 
4867 void MacroAssembler::load_method_holder_cld(Register rresult, Register rmethod) {
4868   load_method_holder(rresult, rmethod);
4869   movptr(rresult, Address(rresult, InstanceKlass::class_loader_data_offset()));
4870 }
4871 
4872 void MacroAssembler::load_method_holder(Register holder, Register method) {
4873   movptr(holder, Address(method, Method::const_offset()));                      // ConstMethod*
4874   movptr(holder, Address(holder, ConstMethod::constants_offset()));             // ConstantPool*
4875   movptr(holder, Address(holder, ConstantPool::pool_holder_offset_in_bytes())); // InstanceKlass*
4876 }
4877 
4878 void MacroAssembler::load_klass(Register dst, Register src, Register tmp) {
4879   assert_different_registers(src, tmp);
4880   assert_different_registers(dst, tmp);
4881 #ifdef _LP64
4882   if (UseCompressedClassPointers) {
4883     movl(dst, Address(src, oopDesc::klass_offset_in_bytes()));
4884     decode_klass_not_null(dst, tmp);
4885   } else
4886 #endif
4887     movptr(dst, Address(src, oopDesc::klass_offset_in_bytes()));
4888 }

4889 
4890 void MacroAssembler::store_klass(Register dst, Register src, Register tmp) {
4891   assert_different_registers(src, tmp);
4892   assert_different_registers(dst, tmp);
4893 #ifdef _LP64
4894   if (UseCompressedClassPointers) {
4895     encode_klass_not_null(src, tmp);
4896     movl(Address(dst, oopDesc::klass_offset_in_bytes()), src);
4897   } else
4898 #endif
4899     movptr(Address(dst, oopDesc::klass_offset_in_bytes()), src);
4900 }
4901 






4902 void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, Register dst, Address src,
4903                                     Register tmp1, Register thread_tmp) {
4904   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
4905   decorators = AccessInternal::decorator_fixup(decorators);
4906   bool as_raw = (decorators & AS_RAW) != 0;
4907   if (as_raw) {
4908     bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
4909   } else {
4910     bs->load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
4911   }
4912 }
4913 
4914 void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, Address dst, Register src,
4915                                      Register tmp1, Register tmp2, Register tmp3) {
4916   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
4917   decorators = AccessInternal::decorator_fixup(decorators);
4918   bool as_raw = (decorators & AS_RAW) != 0;
4919   if (as_raw) {
4920     bs->BarrierSetAssembler::store_at(this, decorators, type, dst, src, tmp1, tmp2, tmp3);
4921   } else {

4928   access_load_at(T_OBJECT, IN_HEAP | decorators, dst, src, tmp1, thread_tmp);
4929 }
4930 
4931 // Doesn't do verification, generates fixed size code
4932 void MacroAssembler::load_heap_oop_not_null(Register dst, Address src, Register tmp1,
4933                                             Register thread_tmp, DecoratorSet decorators) {
4934   access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL | decorators, dst, src, tmp1, thread_tmp);
4935 }
4936 
4937 void MacroAssembler::store_heap_oop(Address dst, Register src, Register tmp1,
4938                                     Register tmp2, Register tmp3, DecoratorSet decorators) {
4939   access_store_at(T_OBJECT, IN_HEAP | decorators, dst, src, tmp1, tmp2, tmp3);
4940 }
4941 
4942 // Used for storing NULLs.
4943 void MacroAssembler::store_heap_oop_null(Address dst) {
4944   access_store_at(T_OBJECT, IN_HEAP, dst, noreg, noreg, noreg, noreg);
4945 }
4946 
4947 #ifdef _LP64
4948 void MacroAssembler::store_klass_gap(Register dst, Register src) {
4949   if (UseCompressedClassPointers) {
4950     // Store to klass gap in destination
4951     movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
4952   }
4953 }
4954 
4955 #ifdef ASSERT
4956 void MacroAssembler::verify_heapbase(const char* msg) {
4957   assert (UseCompressedOops, "should be compressed");
4958   assert (Universe::heap() != NULL, "java heap should be initialized");
4959   if (CheckCompressedOops) {
4960     Label ok;
4961     const auto src2 = ExternalAddress((address)CompressedOops::ptrs_base_addr());
4962     assert(!src2.is_lval(), "should not be lval");
4963     const bool is_src2_reachable = reachable(src2);
4964     if (!is_src2_reachable) {
4965       push(rscratch1);  // cmpptr trashes rscratch1
4966     }
4967     cmpptr(r12_heapbase, src2);
4968     jcc(Assembler::equal, ok);
4969     STOP(msg);
4970     bind(ok);
4971     if (!is_src2_reachable) {
4972       pop(rscratch1);
4973     }
4974   }

5087     assert(LogMinObjAlignmentInBytes == CompressedOops::shift(), "decode alg wrong");
5088     if (LogMinObjAlignmentInBytes == Address::times_8) {
5089       leaq(dst, Address(r12_heapbase, src, Address::times_8, 0));
5090     } else {
5091       if (dst != src) {
5092         movq(dst, src);
5093       }
5094       shlq(dst, LogMinObjAlignmentInBytes);
5095       if (CompressedOops::base() != NULL) {
5096         addq(dst, r12_heapbase);
5097       }
5098     }
5099   } else {
5100     assert (CompressedOops::base() == NULL, "sanity");
5101     if (dst != src) {
5102       movq(dst, src);
5103     }
5104   }
5105 }
5106 
5107 void MacroAssembler::encode_klass_not_null(Register r, Register tmp) {
5108   assert_different_registers(r, tmp);
5109   if (CompressedKlassPointers::base() != NULL) {
5110     mov64(tmp, (int64_t)CompressedKlassPointers::base());
5111     subq(r, tmp);


5112   }
5113   if (CompressedKlassPointers::shift() != 0) {
5114     assert (LogKlassAlignmentInBytes == CompressedKlassPointers::shift(), "decode alg wrong");
5115     shrq(r, LogKlassAlignmentInBytes);
5116   }
5117 }
5118 
5119 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
5120   assert_different_registers(src, dst);
5121   if (CompressedKlassPointers::base() != NULL) {
5122     mov64(dst, -(int64_t)CompressedKlassPointers::base());
5123     addq(dst, src);
5124   } else {
5125     movptr(dst, src);
5126   }
5127   if (CompressedKlassPointers::shift() != 0) {
5128     assert (LogKlassAlignmentInBytes == CompressedKlassPointers::shift(), "decode alg wrong");
5129     shrq(dst, LogKlassAlignmentInBytes);
5130   }
5131 }
5132 
5133 void  MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
5134   assert_different_registers(r, tmp);
5135   // Note: it will change flags
5136   assert(UseCompressedClassPointers, "should only be used for compressed headers");
5137   // Cannot assert, unverified entry point counts instructions (see .ad file)
5138   // vtableStubs also counts instructions in pd_code_size_limit.
5139   // Also do not verify_oop as this is called by verify_oop.
5140   if (CompressedKlassPointers::shift() != 0) {
5141     assert(LogKlassAlignmentInBytes == CompressedKlassPointers::shift(), "decode alg wrong");
5142     shlq(r, LogKlassAlignmentInBytes);
5143   }
5144   if (CompressedKlassPointers::base() != NULL) {
5145     mov64(tmp, (int64_t)CompressedKlassPointers::base());









5146     addq(r, tmp);




5147   }
5148 }
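
For reference, the encode/decode pair above reduces to subtract-base-then-shift and shift-then-add-base. A minimal sketch of that arithmetic, using made-up base and shift values rather than the VM's actual settings:

  #include <cassert>
  #include <cstdint>

  int main() {
    const uint64_t base  = 0x800000000ULL;  // hypothetical CompressedKlassPointers::base()
    const int      shift = 3;               // hypothetical LogKlassAlignmentInBytes
    const uint64_t klass = base + 0x12340ULL;            // some Klass* inside the encoding range
    const uint64_t narrow  = (klass - base) >> shift;     // encode_klass_not_null: subq(base), shrq(shift)
    const uint64_t decoded = (narrow << shift) + base;    // decode_klass_not_null: shlq(shift), addq(base)
    assert(decoded == klass);
    return 0;
  }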
5149 
5150 void  MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
5151   assert_different_registers(src, dst);
5152   // Note: it will change flags
5153   assert (UseCompressedClassPointers, "should only be used for compressed headers");
5154   // Cannot assert, unverified entry point counts instructions (see .ad file)
5155   // vtableStubs also counts instructions in pd_code_size_limit.
5156   // Also do not verify_oop as this is called by verify_oop.
5157 
5158   if (CompressedKlassPointers::base() == NULL &&
5159       CompressedKlassPointers::shift() == 0) {
5160     // The best case scenario is that there is no base or shift. Then it is already
5161     // a pointer that needs nothing but a register rename.
5162     movl(dst, src);
5163   } else {
5164     if (CompressedKlassPointers::base() != NULL) {
5165       mov64(dst, (int64_t)CompressedKlassPointers::base());
5166     } else {
5167       xorq(dst, dst);
5168     }
5169     if (CompressedKlassPointers::shift() != 0) {
5170       assert(LogKlassAlignmentInBytes == CompressedKlassPointers::shift(), "decode alg wrong");
5171       assert(LogKlassAlignmentInBytes == Address::times_8, "klass not aligned on 64bits?");
5172       leaq(dst, Address(dst, src, Address::times_8, 0));
5173     } else {
5174       addq(dst, src);
5175     }
5176   }
5177 }
5178 
5179 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
5180   assert (UseCompressedOops, "should only be used for compressed headers");
5181   assert (Universe::heap() != NULL, "java heap should be initialized");
5182   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
5183   int oop_index = oop_recorder()->find_index(obj);
5184   RelocationHolder rspec = oop_Relocation::spec(oop_index);
5185   mov_narrow_oop(dst, oop_index, rspec);
5186 }
5187 
5188 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
5189   assert (UseCompressedOops, "should only be used for compressed headers");
5190   assert (Universe::heap() != NULL, "java heap should be initialized");
5191   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
5192   int oop_index = oop_recorder()->find_index(obj);
5193   RelocationHolder rspec = oop_Relocation::spec(oop_index);
5194   mov_narrow_oop(dst, oop_index, rspec);
5195 }

  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "jvm.h"
  27 #include "asm/assembler.hpp"
  28 #include "asm/assembler.inline.hpp"
  29 #include "compiler/compiler_globals.hpp"
  30 #include "compiler/disassembler.hpp"
  31 #include "gc/shared/barrierSet.hpp"
  32 #include "gc/shared/barrierSetAssembler.hpp"
  33 #include "gc/shared/collectedHeap.inline.hpp"
  34 #include "gc/shared/tlab_globals.hpp"
  35 #include "interpreter/bytecodeHistogram.hpp"
  36 #include "interpreter/interpreter.hpp"
  37 #include "logging/log.hpp"
  38 #include "memory/resourceArea.hpp"
  39 #include "memory/universe.hpp"
  40 #include "oops/accessDecorators.hpp"
  41 #include "oops/compressedKlass.inline.hpp"
  42 #include "oops/compressedOops.inline.hpp"
  43 #include "oops/klass.inline.hpp"
  44 #include "prims/methodHandles.hpp"
  45 #include "runtime/continuation.hpp"
  46 #include "runtime/flags/flagSetting.hpp"
  47 #include "runtime/interfaceSupport.inline.hpp"
  48 #include "runtime/javaThread.hpp"
  49 #include "runtime/jniHandles.hpp"
  50 #include "runtime/objectMonitor.hpp"
  51 #include "runtime/os.hpp"
  52 #include "runtime/safepoint.hpp"
  53 #include "runtime/safepointMechanism.hpp"
  54 #include "runtime/sharedRuntime.hpp"
  55 #include "runtime/stubRoutines.hpp"
  56 #include "utilities/macros.hpp"
  57 #include "crc32c.h"
  58 
  59 #ifdef PRODUCT
  60 #define BLOCK_COMMENT(str) /* nothing */
  61 #define STOP(error) stop(error)

3908 
3909   int restore_offset;
3910   if (offset == -1) {
3911     restore_offset = restore_size - gp_reg_size;
3912   } else {
3913     restore_offset = offset + restore_size - gp_reg_size;
3914   }
3915   for (ReverseRegSetIterator<Register> it = set.rbegin(); *it != noreg; ++it) {
3916     movptr(*it, Address(rsp, restore_offset));
3917     restore_offset -= gp_reg_size;
3918   }
3919 
3920   if (offset == -1) {
3921     addptr(rsp, aligned_size);
3922   }
3923 }
3924 
3925 // Preserves the contents of address; destroys the contents of length_in_bytes and temp.
3926 void MacroAssembler::zero_memory(Register address, Register length_in_bytes, int offset_in_bytes, Register temp) {
3927   assert(address != length_in_bytes && address != temp && temp != length_in_bytes, "registers must be different");
3928   assert((offset_in_bytes & (BytesPerInt - 1)) == 0, "offset must be a multiple of BytesPerInt");
3929   Label done;
3930 
3931   testptr(length_in_bytes, length_in_bytes);
3932   jcc(Assembler::zero, done);
3933 
3934   // Emit single 32bit store to clear leading bytes, if necessary.
3935   xorptr(temp, temp);    // use _zero reg to clear memory (shorter code)
3936 #ifdef _LP64
3937   if (!is_aligned(offset_in_bytes, BytesPerWord)) {
3938     movl(Address(address, offset_in_bytes), temp);
3939     offset_in_bytes += BytesPerInt;
3940     decrement(length_in_bytes, BytesPerInt);
3941   }
3942   assert((offset_in_bytes & (BytesPerWord - 1)) == 0, "offset must be a multiple of BytesPerWord");
3943   testptr(length_in_bytes, length_in_bytes);
3944   jcc(Assembler::zero, done);
3945 #endif
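
A hedged walk-through of the leading-store fix-up above, for hypothetical inputs offset_in_bytes = 12 and length_in_bytes = 20 on LP64:

  // offset 12 is BytesPerInt- but not BytesPerWord-aligned, so:
  //   movl [address + 12], temp(=0)   clears bytes [12, 16)
  //   offset_in_bytes -> 16           now word-aligned
  //   length_in_bytes -> 16           still a multiple of BytesPerWord
  // the remaining 16 bytes are then cleared word-wise by the code below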
3946 
3947   // initialize topmost word, divide index by 2, check if odd and test if zero
3948   // note: for the remaining code to work, index must be a multiple of BytesPerWord
3949 #ifdef ASSERT
3950   {
3951     Label L;
3952     testptr(length_in_bytes, BytesPerWord - 1);
3953     jcc(Assembler::zero, L);
3954     stop("length must be a multiple of BytesPerWord");
3955     bind(L);
3956   }
3957 #endif
3958   Register index = length_in_bytes;

3959   if (UseIncDec) {
3960     shrptr(index, 3);  // divide by 8/16 and set carry flag if bit 2 was set
3961   } else {
3962     shrptr(index, 2);  // use 2 instructions to avoid partial flag stall
3963     shrptr(index, 1);
3964   }
3965 #ifndef _LP64
3966   // index might not have been a multiple of 8 (i.e., bit 2 was set)
3967   {
3968     Label even;
3969     // note: if index was a multiple of 8, then it cannot
3970     //       be 0 now otherwise it must have been 0 before
3971     //       => if it is even, we don't need to check for 0 again
3972     jcc(Assembler::carryClear, even);
3973     // clear topmost word (no jump would be needed if conditional assignment worked here)
3974     movptr(Address(address, index, Address::times_8, offset_in_bytes - 0*BytesPerWord), temp);
3975     // index could be 0 now, must check again
3976     jcc(Assembler::zero, done);
3977     bind(even);
3978   }
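
On 32-bit, the carry left by the shift above flags an odd multiple of BytesPerWord. A hedged trace for a hypothetical length_in_bytes of 20 (byte ranges relative to address + offset_in_bytes):

  // index = 20 >> 3 = 2, CF = 1 (bit 2 of 20 was the last bit shifted out)
  // carry set -> movptr stores one zero word at [address + 2*8 + offset], clearing bytes [16, 20)
  // ZF is still clear (index == 2), so execution falls through to 'even'
  // the word-pair loop that follows (not shown in this hunk) clears the remaining bytes [0, 16)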

4872 
4873 void MacroAssembler::load_mirror(Register mirror, Register method, Register tmp) {
4874   // get mirror
4875   const int mirror_offset = in_bytes(Klass::java_mirror_offset());
4876   load_method_holder(mirror, method);
4877   movptr(mirror, Address(mirror, mirror_offset));
4878   resolve_oop_handle(mirror, tmp);
4879 }
4880 
4881 void MacroAssembler::load_method_holder_cld(Register rresult, Register rmethod) {
4882   load_method_holder(rresult, rmethod);
4883   movptr(rresult, Address(rresult, InstanceKlass::class_loader_data_offset()));
4884 }
4885 
4886 void MacroAssembler::load_method_holder(Register holder, Register method) {
4887   movptr(holder, Address(method, Method::const_offset()));                      // ConstMethod*
4888   movptr(holder, Address(holder, ConstMethod::constants_offset()));             // ConstantPool*
4889   movptr(holder, Address(holder, ConstantPool::pool_holder_offset_in_bytes())); // InstanceKlass*
4890 }
4891 



4892 #ifdef _LP64
4893 void MacroAssembler::load_nklass(Register dst, Register src) {
4894   assert_different_registers(src, dst);
4895   assert(UseCompressedClassPointers, "expect compressed class pointers");
4896 
4897   Label slow, done;
4898   movq(dst, Address(src, oopDesc::mark_offset_in_bytes()));
4899   // NOTE: While it would seem nice to use xorb instead (for which we don't have an encoding in our assembler),
4900   // the encoding for xorq uses the signed version (0x81/6) of xor, which encodes as compactly as xorb would,
4901   // and doesn't make a difference performance-wise.
4902   xorq(dst, markWord::unlocked_value);
4903   testb(dst, markWord::lock_mask_in_place);
4904   jccb(Assembler::notZero, slow);
4905 
4906   shrq(dst, markWord::klass_shift);
4907   jmp(done);
4908   bind(slow);
4909 
4910   if (dst != rax) {
4911     push(rax);
4912   }
4913   if (src != rax) {
4914     mov(rax, src);
4915   }
4916   call(RuntimeAddress(StubRoutines::load_nklass()));
4917   if (dst != rax) {
4918     mov(dst, rax);
4919     pop(rax);
4920   }
4921 
4922   bind(done);
4923 }
4924 #endif
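
The fast path above relies on the mark-word layout this patch assumes: the narrow Klass* sits above markWord::klass_shift and the two low lock bits are 01 for an unlocked object. A hedged sketch of why the xorq/testb pair selects it:

  // mark word (unlocked): [ narrow klass (upper bits) | hash/age/etc. | 01 ]
  // xorq(dst, markWord::unlocked_value)      -> the low 01 becomes 00
  // testb(dst, markWord::lock_mask_in_place) -> zero iff the object was unlocked
  //   zero:     shrq(dst, markWord::klass_shift) extracts the narrow klass directly
  //   nonzero:  the mark is displaced (stack lock / monitor), so the
  //             StubRoutines::load_nklass() call recovers the narrow klass instead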
4925 
4926 void MacroAssembler::load_klass(Register dst, Register src, Register tmp, bool null_check_src) {
4927   assert_different_registers(src, tmp);
4928   assert_different_registers(dst, tmp);
4929 #ifdef _LP64
4930   assert(UseCompressedClassPointers, "expect compressed class pointers");
4931   Register d = dst;
4932   if (src == dst) {
4933     d = tmp;
4934   }
4935   if (null_check_src) {
4936     null_check(src, oopDesc::mark_offset_in_bytes());
4937   }
4938   load_nklass(d, src);
4939   if (src == dst) {
4940     mov(dst, d);
4941   }
4942   decode_klass_not_null(dst, tmp);
4943 #else
4944   if (null_check_src) {
4945     null_check(src, oopDesc::klass_offset_in_bytes());
4946   }
4947   movptr(dst, Address(src, oopDesc::klass_offset_in_bytes()));
4948 #endif

4949 }
4950 
4951 #ifndef _LP64
4952 void MacroAssembler::store_klass(Register dst, Register src) {
4953   movptr(Address(dst, oopDesc::klass_offset_in_bytes()), src);
4954 }
4955 #endif
4956 
4957 void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, Register dst, Address src,
4958                                     Register tmp1, Register thread_tmp) {
4959   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
4960   decorators = AccessInternal::decorator_fixup(decorators);
4961   bool as_raw = (decorators & AS_RAW) != 0;
4962   if (as_raw) {
4963     bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
4964   } else {
4965     bs->load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
4966   }
4967 }
4968 
4969 void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, Address dst, Register src,
4970                                      Register tmp1, Register tmp2, Register tmp3) {
4971   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
4972   decorators = AccessInternal::decorator_fixup(decorators);
4973   bool as_raw = (decorators & AS_RAW) != 0;
4974   if (as_raw) {
4975     bs->BarrierSetAssembler::store_at(this, decorators, type, dst, src, tmp1, tmp2, tmp3);
4976   } else {

4983   access_load_at(T_OBJECT, IN_HEAP | decorators, dst, src, tmp1, thread_tmp);
4984 }
4985 
4986 // Doesn't do verification, generates fixed size code
4987 void MacroAssembler::load_heap_oop_not_null(Register dst, Address src, Register tmp1,
4988                                             Register thread_tmp, DecoratorSet decorators) {
4989   access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL | decorators, dst, src, tmp1, thread_tmp);
4990 }
4991 
4992 void MacroAssembler::store_heap_oop(Address dst, Register src, Register tmp1,
4993                                     Register tmp2, Register tmp3, DecoratorSet decorators) {
4994   access_store_at(T_OBJECT, IN_HEAP | decorators, dst, src, tmp1, tmp2, tmp3);
4995 }
4996 
4997 // Used for storing NULLs.
4998 void MacroAssembler::store_heap_oop_null(Address dst) {
4999   access_store_at(T_OBJECT, IN_HEAP, dst, noreg, noreg, noreg, noreg);
5000 }
5001 
5002 #ifdef _LP64







5003 #ifdef ASSERT
5004 void MacroAssembler::verify_heapbase(const char* msg) {
5005   assert (UseCompressedOops, "should be compressed");
5006   assert (Universe::heap() != NULL, "java heap should be initialized");
5007   if (CheckCompressedOops) {
5008     Label ok;
5009     const auto src2 = ExternalAddress((address)CompressedOops::ptrs_base_addr());
5010     assert(!src2.is_lval(), "should not be lval");
5011     const bool is_src2_reachable = reachable(src2);
5012     if (!is_src2_reachable) {
5013       push(rscratch1);  // cmpptr trashes rscratch1
5014     }
5015     cmpptr(r12_heapbase, src2);
5016     jcc(Assembler::equal, ok);
5017     STOP(msg);
5018     bind(ok);
5019     if (!is_src2_reachable) {
5020       pop(rscratch1);
5021     }
5022   }

5135     assert(LogMinObjAlignmentInBytes == CompressedOops::shift(), "decode alg wrong");
5136     if (LogMinObjAlignmentInBytes == Address::times_8) {
5137       leaq(dst, Address(r12_heapbase, src, Address::times_8, 0));
5138     } else {
5139       if (dst != src) {
5140         movq(dst, src);
5141       }
5142       shlq(dst, LogMinObjAlignmentInBytes);
5143       if (CompressedOops::base() != NULL) {
5144         addq(dst, r12_heapbase);
5145       }
5146     }
5147   } else {
5148     assert (CompressedOops::base() == NULL, "sanity");
5149     if (dst != src) {
5150       movq(dst, src);
5151     }
5152   }
5153 }
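
The compressed-oop decode in this hunk is the usual base-plus-shifted-narrow computation; leaq merely folds the shift and add into one instruction when the scale is times_8. A minimal sketch with hypothetical base/shift/narrow values:

  #include <cassert>
  #include <cstdint>

  int main() {
    const uint64_t heapbase = 0x700000000ULL;  // stand-in for CompressedOops::base() kept in r12
    const int      shift    = 3;               // stand-in for LogMinObjAlignmentInBytes
    const uint32_t narrow   = 0x00ABCDE8u;     // a compressed oop
    const uint64_t via_lea   = heapbase + ((uint64_t)narrow << shift);  // leaq(dst, Address(r12_heapbase, src, times_8))
    const uint64_t via_shift = ((uint64_t)narrow << shift) + heapbase;  // movq + shlq + addq branch
    assert(via_lea == via_shift);
    return 0;
  }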
5154 
5155 MacroAssembler::KlassDecodeMode MacroAssembler::_klass_decode_mode = KlassDecodeNone;
5156 
5157 // Returns a static string
5158 const char* MacroAssembler::describe_klass_decode_mode(MacroAssembler::KlassDecodeMode mode) {
5159   switch (mode) {
5160   case KlassDecodeNone: return "none";
5161   case KlassDecodeZero: return "zero";
5162   case KlassDecodeXor:  return "xor";
5163   case KlassDecodeAdd:  return "add";
5164   default:
5165     ShouldNotReachHere();
5166   }
5167   return NULL;
5168 }
5169 
5170 // Return the current narrow Klass pointer decode mode.
5171 MacroAssembler::KlassDecodeMode MacroAssembler::klass_decode_mode() {
5172   if (_klass_decode_mode == KlassDecodeNone) {
5173     // First time initialization
5174     assert(UseCompressedClassPointers, "not using compressed class pointers");
5175     assert(Metaspace::initialized(), "metaspace not initialized yet");
5176 
5177     _klass_decode_mode = klass_decode_mode_for_base(CompressedKlassPointers::base());
5178     guarantee(_klass_decode_mode != KlassDecodeNone,
5179               PTR_FORMAT " is not a valid encoding base on x86",
5180               p2i(CompressedKlassPointers::base()));
5181     log_info(metaspace)("klass decode mode initialized: %s", describe_klass_decode_mode(_klass_decode_mode));
5182   }
5183   return _klass_decode_mode;
5184 }
5185 
5186 // Given an arbitrary base address, return the KlassDecodeMode that would be used. Return KlassDecodeNone
5187 // if base address is not valid for encoding.
5188 MacroAssembler::KlassDecodeMode MacroAssembler::klass_decode_mode_for_base(address base) {
5189   assert(CompressedKlassPointers::shift() != 0, "not lilliput?");
5190 
5191   const uint64_t base_u64 = (uint64_t) base;
5192 
5193   if (base_u64 == 0) {
5194     return KlassDecodeZero;
5195   }
5196 
5197   if ((base_u64 & (KlassEncodingMetaspaceMax - 1)) == 0) {
5198     return KlassDecodeXor;
5199   }
5200 
5201   // Note that there is no point in optimizing for shift=3 since lilliput
5202   // will use larger shifts
5203 
5204   // The add+shift mode for decode_and_move_klass_not_null() requires the base to be
5205   //  shiftable-without-loss. So, this is the minimum restriction on x64 for a valid
5206   //  encoding base. This does not matter in reality since the shift values we use for
5207   //  Lilliput, while large, won't be larger than a page size. And the encoding base
5208   //  will be quite likely page aligned since it usually falls to the beginning of
5209   //  either CDS or CCS.
5210   if ((base_u64 & (KlassAlignmentInBytes - 1)) == 0) {
5211     return KlassDecodeAdd;
5212   }
5213 
5214   return KlassDecodeNone;
5215 }
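
A hedged model of the mode selection above; the two alignment constants are placeholders, not the values the VM derives:

  #include <cassert>
  #include <cstdint>

  enum Mode { Zero, Xor, Add, None };

  static Mode mode_for(uint64_t base, uint64_t encoding_max, uint64_t klass_align) {
    if (base == 0)                        return Zero;
    if ((base & (encoding_max - 1)) == 0) return Xor;   // no base bits below the narrow-klass range
    if ((base & (klass_align - 1)) == 0)  return Add;   // base merely shiftable-without-loss
    return None;
  }

  int main() {
    const uint64_t kEncodingMax = 0x400000000ULL;  // placeholder KlassEncodingMetaspaceMax
    const uint64_t kAlign       = 0x400ULL;        // placeholder KlassAlignmentInBytes
    assert(mode_for(0,              kEncodingMax, kAlign) == Zero);
    assert(mode_for(0x800000000ULL, kEncodingMax, kAlign) == Xor);   // aligned to the encoding max
    assert(mode_for(0x800000400ULL, kEncodingMax, kAlign) == Add);   // only klass-aligned
    assert(mode_for(0x800000001ULL, kEncodingMax, kAlign) == None);
    return 0;
  }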
5216 
5217 void MacroAssembler::encode_klass_not_null(Register r, Register tmp) {
5218   assert_different_registers(r, tmp);
5219   switch (klass_decode_mode()) {
5220   case KlassDecodeZero: {
5221     shrq(r, CompressedKlassPointers::shift());
5222     break;
5223   }
5224   case KlassDecodeXor: {
5225     mov64(tmp, (int64_t)CompressedKlassPointers::base());
5226     xorq(r, tmp);
5227     shrq(r, CompressedKlassPointers::shift());
5228     break;
5229   }
5230   case KlassDecodeAdd: {
5231     mov64(tmp, (int64_t)CompressedKlassPointers::base());
5232     subq(r, tmp);
5233     shrq(r, CompressedKlassPointers::shift());
5234     break;
5235   }
5236   default:
5237     ShouldNotReachHere();

5238   }
5239 }
5240 
5241 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
5242   assert_different_registers(src, dst);
5243   switch (klass_decode_mode()) {
5244   case KlassDecodeZero: {
5245     movptr(dst, src);
5246     shrq(dst, CompressedKlassPointers::shift());
5247     break;
5248   }
5249   case KlassDecodeXor: {
5250     mov64(dst, (int64_t)CompressedKlassPointers::base());
5251     xorq(dst, src);
5252     shrq(dst, CompressedKlassPointers::shift());
5253     break;
5254   }
5255   case KlassDecodeAdd: {
5256     mov64(dst, -(int64_t)CompressedKlassPointers::base());
5257     addq(dst, src);
5258     shrq(dst, CompressedKlassPointers::shift());
5259     break;
5260   }
5261   default:
5262     ShouldNotReachHere();

5263   }
5264 }
5265 
5266 void  MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
5267   assert_different_registers(r, tmp);
5268   const uint64_t base_u64 = (uint64_t)CompressedKlassPointers::base();
5269   switch (klass_decode_mode()) {
5270   case KlassDecodeZero: {
5271     shlq(r, CompressedKlassPointers::shift());
5272     break;



5273   }
5274   case KlassDecodeXor: {
5275     assert((base_u64 & (KlassEncodingMetaspaceMax - 1)) == 0,
5276            "base " UINT64_FORMAT_X " invalid for xor mode", base_u64); // should have been handled at VM init.
5277     shlq(r, CompressedKlassPointers::shift());
5278     mov64(tmp, base_u64);
5279     xorq(r, tmp);
5280     break;
5281   }
5282   case KlassDecodeAdd: {
5283     shlq(r, CompressedKlassPointers::shift());
5284     mov64(tmp, base_u64);
5285     addq(r, tmp);
5286     break;
5287   }
5288   default:
5289     ShouldNotReachHere();
5290   }
5291 }
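
The KlassDecodeXor path works because a base aligned to KlassEncodingMetaspaceMax has no one-bits in the range occupied by the shifted narrow klass, so xorq and addq produce the same result. A minimal check with hypothetical values:

  #include <cassert>
  #include <cstdint>

  int main() {
    const uint64_t base    = 0x800000000ULL;  // aligned to a (placeholder) encoding max
    const int      shift   = 9;               // placeholder Lilliput-style shift
    const uint64_t narrow  = 0x0001CAFEULL;
    const uint64_t shifted = narrow << shift;           // shlq(r, shift)
    assert((base & shifted) == 0);                      // disjoint bit ranges
    assert((shifted ^ base) == (shifted + base));       // xorq(base) == addq(base) here
    return 0;
  }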
5292 
5293 void  MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
5294   assert_different_registers(src, dst);
5295   // Note: Cannot assert, unverified entry point counts instructions (see .ad file)


5296   // vtableStubs also counts instructions in pd_code_size_limit.
5297   // Also do not verify_oop as this is called by verify_oop.
5298 
5299   const uint64_t base_u64 = (uint64_t)CompressedKlassPointers::base();
5300 
5301   switch (klass_decode_mode()) {
5302   case KlassDecodeZero: {
5303     movq(dst, src);
5304     shlq(dst, CompressedKlassPointers::shift());
5305     break;
5306   }
5307   case KlassDecodeXor: {
5308     assert((base_u64 & (KlassEncodingMetaspaceMax - 1)) == 0,
5309            "base " UINT64_FORMAT_X " invalid for xor mode", base_u64); // should have been handled at VM init.
5310     const uint64_t base_right_shifted = base_u64 >> CompressedKlassPointers::shift();
5311     mov64(dst, base_right_shifted);
5312     xorq(dst, src);
5313     shlq(dst, CompressedKlassPointers::shift());
5314     break;
5315   }
5316   case KlassDecodeAdd: {
5317     assert((base_u64 & (KlassAlignmentInBytes - 1)) == 0,
5318            "base " UINT64_FORMAT_X " invalid for add mode", base_u64); // should have been handled at VM init.
5319     const uint64_t base_right_shifted = base_u64 >> CompressedKlassPointers::shift();
5320     mov64(dst, base_right_shifted);
5321     addq(dst, src);
5322     shlq(dst, CompressedKlassPointers::shift());
5323     break;
5324   }
5325   default:
5326     ShouldNotReachHere();
5327   }
5328 }
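
The pre-shifted-base trick above (mov64 of base >> shift, then addq/xorq with the narrow value, then one shlq) is exact whenever the low shift bits of the base are zero, which the KlassDecodeAdd/KlassDecodeXor alignment checks guarantee. A minimal check with hypothetical values:

  #include <cassert>
  #include <cstdint>

  int main() {
    const uint64_t base   = 0x800000000ULL;  // placeholder encoding base
    const int      shift  = 9;               // placeholder shift
    const uint64_t narrow = 0x0001CAFEULL;
    const uint64_t direct     = (narrow << shift) + base;               // shift first, then add base
    const uint64_t preshifted = ((base >> shift) + narrow) << shift;    // mov64(base >> shift); addq; shlq
    assert((base & ((1ULL << shift) - 1)) == 0);   // low bits of base are zero
    assert(direct == preshifted);
    return 0;
  }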
5329 
5330 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
5331   assert (UseCompressedOops, "should only be used for compressed headers");
5332   assert (Universe::heap() != NULL, "java heap should be initialized");
5333   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
5334   int oop_index = oop_recorder()->find_index(obj);
5335   RelocationHolder rspec = oop_Relocation::spec(oop_index);
5336   mov_narrow_oop(dst, oop_index, rspec);
5337 }
5338 
5339 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
5340   assert (UseCompressedOops, "should only be used for compressed headers");
5341   assert (Universe::heap() != NULL, "java heap should be initialized");
5342   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
5343   int oop_index = oop_recorder()->find_index(obj);
5344   RelocationHolder rspec = oop_Relocation::spec(oop_index);
5345   mov_narrow_oop(dst, oop_index, rspec);
5346 }