< prev index next >

src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp

Print this page

  28 #include "precompiled.hpp"
  29 #include "asm/assembler.hpp"
  30 #include "asm/assembler.inline.hpp"
  31 #include "ci/ciEnv.hpp"
  32 #include "compiler/compileTask.hpp"
  33 #include "compiler/disassembler.hpp"
  34 #include "compiler/oopMap.hpp"
  35 #include "gc/shared/barrierSet.hpp"
  36 #include "gc/shared/barrierSetAssembler.hpp"
  37 #include "gc/shared/cardTableBarrierSet.hpp"
  38 #include "gc/shared/cardTable.hpp"
  39 #include "gc/shared/collectedHeap.hpp"
  40 #include "gc/shared/tlab_globals.hpp"
  41 #include "interpreter/bytecodeHistogram.hpp"
  42 #include "interpreter/interpreter.hpp"
  43 #include "jvm.h"
  44 #include "memory/resourceArea.hpp"
  45 #include "memory/universe.hpp"
  46 #include "nativeInst_aarch64.hpp"
  47 #include "oops/accessDecorators.hpp"

  48 #include "oops/compressedOops.inline.hpp"
  49 #include "oops/klass.inline.hpp"
  50 #include "runtime/continuation.hpp"
  51 #include "runtime/icache.hpp"
  52 #include "runtime/interfaceSupport.inline.hpp"
  53 #include "runtime/javaThread.hpp"
  54 #include "runtime/jniHandles.inline.hpp"
  55 #include "runtime/sharedRuntime.hpp"
  56 #include "runtime/stubRoutines.hpp"
  57 #include "utilities/powerOfTwo.hpp"
  58 #ifdef COMPILER1
  59 #include "c1/c1_LIRAssembler.hpp"
  60 #endif
  61 #ifdef COMPILER2
  62 #include "oops/oop.hpp"
  63 #include "opto/compile.hpp"
  64 #include "opto/node.hpp"
  65 #include "opto/output.hpp"
  66 #endif
  67 

4297   adrp(rscratch1, src2, offset);
4298   ldr(rscratch1, Address(rscratch1, offset));
4299   cmp(src1, rscratch1);
4300 }
4301 
// Compare two oops held in registers. Plain 64-bit register compare;
// both operands are full (uncompressed) oops here.
void MacroAssembler::cmpoop(Register obj1, Register obj2) {
  cmp(obj1, obj2);
}
4305 
// Load the ClassLoaderData of rmethod's holder klass into rresult:
// Method* -> holder InstanceKlass* -> ClassLoaderData*.
void MacroAssembler::load_method_holder_cld(Register rresult, Register rmethod) {
  load_method_holder(rresult, rmethod);
  ldr(rresult, Address(rresult, InstanceKlass::class_loader_data_offset()));
}
4310 
// Load the InstanceKlass* that declares 'method' into 'holder' by chasing
// Method* -> ConstMethod* -> ConstantPool* -> pool holder.
void MacroAssembler::load_method_holder(Register holder, Register method) {
  ldr(holder, Address(method, Method::const_offset()));                      // ConstMethod*
  ldr(holder, Address(holder, ConstMethod::constants_offset()));             // ConstantPool*
  ldr(holder, Address(holder, ConstantPool::pool_holder_offset_in_bytes())); // InstanceKlass*
}
4316 
























// Load the Klass* of the object in 'src' into 'dst'. With compressed class
// pointers the 32-bit narrow klass is loaded and decoded; otherwise the
// full 64-bit Klass* is loaded directly.
void MacroAssembler::load_klass(Register dst, Register src) {
  if (UseCompressedClassPointers) {
    ldrw(dst, Address(src, oopDesc::klass_offset_in_bytes()));
    // Objects always have a klass, so the not-null decode is safe.
    decode_klass_not_null(dst);
  } else {
    ldr(dst, Address(src, oopDesc::klass_offset_in_bytes()));
  }
}
4325 
// ((OopHandle)result).resolve();
// Dereference an OopHandle in 'result', leaving the resolved oop in
// 'result'. Uses the GC-aware access_load_at so the right barriers apply.
void MacroAssembler::resolve_oop_handle(Register result, Register tmp1, Register tmp2) {
  // OopHandle::resolve is an indirection.
  access_load_at(T_OBJECT, IN_NATIVE, result, Address(result, 0), tmp1, tmp2);
}
4331 
// ((WeakHandle)result).resolve();
// Dereference a WeakHandle in 'result' with phantom-reference semantics,
// leaving the resolved oop (or null) in 'result'.
void MacroAssembler::resolve_weak_handle(Register result, Register tmp1, Register tmp2) {
  assert_different_registers(result, tmp1, tmp2);
  Label resolved;

  // A null weak handle resolves to null.
  cbz(result, resolved);

  // Only 64 bit platforms support GCs that require a tmp register
  // WeakHandle::resolve is an indirection like jweak.
  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
                 result, Address(result), tmp1, tmp2);
  bind(resolved);
}
4346 
4347 void MacroAssembler::load_mirror(Register dst, Register method, Register tmp1, Register tmp2) {
4348   const int mirror_offset = in_bytes(Klass::java_mirror_offset());
4349   ldr(dst, Address(rmethod, Method::const_offset()));
4350   ldr(dst, Address(dst, ConstMethod::constants_offset()));
4351   ldr(dst, Address(dst, ConstantPool::pool_holder_offset_in_bytes()));
4352   ldr(dst, Address(dst, mirror_offset));
4353   resolve_oop_handle(dst, tmp1, tmp2);
4354 }
4355 
4356 void MacroAssembler::cmp_klass(Register oop, Register trial_klass, Register tmp) {

4357   if (UseCompressedClassPointers) {
4358     ldrw(tmp, Address(oop, oopDesc::klass_offset_in_bytes()));




4359     if (CompressedKlassPointers::base() == nullptr) {
4360       cmp(trial_klass, tmp, LSL, CompressedKlassPointers::shift());
4361       return;
4362     } else if (((uint64_t)CompressedKlassPointers::base() & 0xffffffff) == 0
4363                && CompressedKlassPointers::shift() == 0) {
4364       // Only the bottom 32 bits matter
4365       cmpw(trial_klass, tmp);
4366       return;
4367     }
4368     decode_klass_not_null(tmp);
4369   } else {
4370     ldr(tmp, Address(oop, oopDesc::klass_offset_in_bytes()));
4371   }
4372   cmp(trial_klass, tmp);
4373 }
4374 
4375 void MacroAssembler::store_klass(Register dst, Register src) {
4376   // FIXME: Should this be a store release?  concurrent gcs assumes
4377   // klass length is valid if klass field is not null.
4378   if (UseCompressedClassPointers) {

4507   // Cannot assert, unverified entry point counts instructions (see .ad file)
4508   // vtableStubs also counts instructions in pd_code_size_limit.
4509   // Also do not verify_oop as this is called by verify_oop.
4510   if (CompressedOops::shift() != 0) {
4511     assert(LogMinObjAlignmentInBytes == CompressedOops::shift(), "decode alg wrong");
4512     if (CompressedOops::base() != nullptr) {
4513       add(dst, rheapbase, src, Assembler::LSL, LogMinObjAlignmentInBytes);
4514     } else {
4515       add(dst, zr, src, Assembler::LSL, LogMinObjAlignmentInBytes);
4516     }
4517   } else {
4518     assert (CompressedOops::base() == nullptr, "sanity");
4519     if (dst != src) {
4520       mov(dst, src);
4521     }
4522   }
4523 }
4524 
4525 MacroAssembler::KlassDecodeMode MacroAssembler::_klass_decode_mode(KlassDecodeNone);
4526 














// Determine (and cache in _klass_decode_mode) the strategy used to
// encode/decode narrow Klass pointers, chosen from the compressed class
// space base and shift.
MacroAssembler::KlassDecodeMode MacroAssembler::klass_decode_mode() {
  assert(UseCompressedClassPointers, "not using compressed class pointers");
  assert(Metaspace::initialized(), "metaspace not initialized yet");

  // Return the cached mode once it has been computed.
  if (_klass_decode_mode != KlassDecodeNone) {
    return _klass_decode_mode;
  }

  assert(LogKlassAlignmentInBytes == CompressedKlassPointers::shift()
         || 0 == CompressedKlassPointers::shift(), "decode alg wrong");

  // Zero base: decode is just a shift (or a plain move when shift == 0).
  if (CompressedKlassPointers::base() == nullptr) {
    return (_klass_decode_mode = KlassDecodeZero);
  }

  // If the base is encodable as an AArch64 bitmask immediate and its bits
  // do not overlap the klass range, a single EOR adds/removes the base.
  if (operand_valid_for_logical_immediate(
        /*is32*/false, (uint64_t)CompressedKlassPointers::base())) {
    const uint64_t range_mask =
      (1ULL << log2i(CompressedKlassPointers::range())) - 1;
    if (((uint64_t)CompressedKlassPointers::base() & range_mask) == 0) {
      return (_klass_decode_mode = KlassDecodeXor);
    }
  }

  // Otherwise the shifted base must occupy only bits [32..47], so a single
  // MOVK(..., 32) can materialize it during decode.
  const uint64_t shifted_base =
    (uint64_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
  guarantee((shifted_base & 0xffff0000ffffffff) == 0,
            "compressed class base bad alignment");

  return (_klass_decode_mode = KlassDecodeMovk);
}
4558 
// Encode the Klass* in 'src' into its narrow form in 'dst' (dst may equal
// src). Inverse of decode_klass_not_null(); src must not be null.
void MacroAssembler::encode_klass_not_null(Register dst, Register src) {
  switch (klass_decode_mode()) {
  case KlassDecodeZero:
    // Zero base: encoding is just the right shift (or a move if shift == 0).
    if (CompressedKlassPointers::shift() != 0) {
      lsr(dst, src, LogKlassAlignmentInBytes);
    } else {
      if (dst != src) mov(dst, src);
    }
    break;

  case KlassDecodeXor:
    // Base bits do not overlap the klass range: XOR strips the base,
    // then shift down if needed.
    if (CompressedKlassPointers::shift() != 0) {
      eor(dst, src, (uint64_t)CompressedKlassPointers::base());
      lsr(dst, dst, LogKlassAlignmentInBytes);
    } else {
      eor(dst, src, (uint64_t)CompressedKlassPointers::base());
    }
    break;

  case KlassDecodeMovk:
    // Base lives only in bits [32..47]; extracting the low 32 bits
    // (after the shift) discards it in one instruction.
    if (CompressedKlassPointers::shift() != 0) {
      ubfx(dst, src, LogKlassAlignmentInBytes, 32);
    } else {
      movw(dst, src);
    }
    break;

  case KlassDecodeNone:
    ShouldNotReachHere();
    break;
  }
}
4591 
// In-place variant: encode the Klass* in 'r' into its narrow form.
void MacroAssembler::encode_klass_not_null(Register r) {
  encode_klass_not_null(r, r);
}
4595 
// Decode the narrow klass in 'src' into a full Klass* in 'dst' (dst may
// equal src). src must not be null. Inverse of encode_klass_not_null().
void  MacroAssembler::decode_klass_not_null(Register dst, Register src) {
  assert (UseCompressedClassPointers, "should only be used for compressed headers");

  switch (klass_decode_mode()) {
  case KlassDecodeZero:
    // Zero base: decoding is just the left shift (or a move if shift == 0).
    if (CompressedKlassPointers::shift() != 0) {
      lsl(dst, src, LogKlassAlignmentInBytes);
    } else {
      if (dst != src) mov(dst, src);
    }
    break;

  case KlassDecodeXor:
    // Shift up, then XOR in the base (whose bits don't overlap the range).
    if (CompressedKlassPointers::shift() != 0) {
      lsl(dst, src, LogKlassAlignmentInBytes);
      eor(dst, dst, (uint64_t)CompressedKlassPointers::base());
    } else {
      eor(dst, src, (uint64_t)CompressedKlassPointers::base());
    }
    break;

  case KlassDecodeMovk: {
    const uint64_t shifted_base =
      (uint64_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();

    // Insert the base's bits [32..47] with a single MOVK; klass_decode_mode()
    // guaranteed all other base bits are zero.
    if (dst != src) movw(dst, src);
    movk(dst, shifted_base >> 32, 32);

    if (CompressedKlassPointers::shift() != 0) {
      lsl(dst, dst, LogKlassAlignmentInBytes);
    }

    break;
  }

  case KlassDecodeNone:
    ShouldNotReachHere();
    break;
  }
}
4636 
// In-place variant: decode the narrow klass in 'r' into a full Klass*.
void  MacroAssembler::decode_klass_not_null(Register r) {
  decode_klass_not_null(r, r);
}
4640 
4641 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
4642 #ifdef ASSERT
4643   {
4644     ThreadInVMfromUnknown tiv;
4645     assert (UseCompressedOops, "should only be used for compressed oops");
4646     assert (Universe::heap() != nullptr, "java heap should be initialized");
4647     assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");

  28 #include "precompiled.hpp"
  29 #include "asm/assembler.hpp"
  30 #include "asm/assembler.inline.hpp"
  31 #include "ci/ciEnv.hpp"
  32 #include "compiler/compileTask.hpp"
  33 #include "compiler/disassembler.hpp"
  34 #include "compiler/oopMap.hpp"
  35 #include "gc/shared/barrierSet.hpp"
  36 #include "gc/shared/barrierSetAssembler.hpp"
  37 #include "gc/shared/cardTableBarrierSet.hpp"
  38 #include "gc/shared/cardTable.hpp"
  39 #include "gc/shared/collectedHeap.hpp"
  40 #include "gc/shared/tlab_globals.hpp"
  41 #include "interpreter/bytecodeHistogram.hpp"
  42 #include "interpreter/interpreter.hpp"
  43 #include "jvm.h"
  44 #include "memory/resourceArea.hpp"
  45 #include "memory/universe.hpp"
  46 #include "nativeInst_aarch64.hpp"
  47 #include "oops/accessDecorators.hpp"
  48 #include "oops/compressedKlass.inline.hpp"
  49 #include "oops/compressedOops.inline.hpp"
  50 #include "oops/klass.inline.hpp"
  51 #include "runtime/continuation.hpp"
  52 #include "runtime/icache.hpp"
  53 #include "runtime/interfaceSupport.inline.hpp"
  54 #include "runtime/javaThread.hpp"
  55 #include "runtime/jniHandles.inline.hpp"
  56 #include "runtime/sharedRuntime.hpp"
  57 #include "runtime/stubRoutines.hpp"
  58 #include "utilities/powerOfTwo.hpp"
  59 #ifdef COMPILER1
  60 #include "c1/c1_LIRAssembler.hpp"
  61 #endif
  62 #ifdef COMPILER2
  63 #include "oops/oop.hpp"
  64 #include "opto/compile.hpp"
  65 #include "opto/node.hpp"
  66 #include "opto/output.hpp"
  67 #endif
  68 

4298   adrp(rscratch1, src2, offset);
4299   ldr(rscratch1, Address(rscratch1, offset));
4300   cmp(src1, rscratch1);
4301 }
4302 
// Compare two oops held in registers. Plain 64-bit register compare;
// both operands are full (uncompressed) oops here.
void MacroAssembler::cmpoop(Register obj1, Register obj2) {
  cmp(obj1, obj2);
}
4306 
// Load the ClassLoaderData of rmethod's holder klass into rresult:
// Method* -> holder InstanceKlass* -> ClassLoaderData*.
void MacroAssembler::load_method_holder_cld(Register rresult, Register rmethod) {
  load_method_holder(rresult, rmethod);
  ldr(rresult, Address(rresult, InstanceKlass::class_loader_data_offset()));
}
4311 
// Load the InstanceKlass* that declares 'method' into 'holder' by chasing
// Method* -> ConstMethod* -> ConstantPool* -> pool holder.
void MacroAssembler::load_method_holder(Register holder, Register method) {
  ldr(holder, Address(method, Method::const_offset()));                      // ConstMethod*
  ldr(holder, Address(holder, ConstMethod::constants_offset()));             // ConstantPool*
  ldr(holder, Address(holder, ConstantPool::pool_holder_offset_in_bytes())); // InstanceKlass*
}
4317 
// Loads the obj's narrow Klass* into dst.
// Preserves all registers (incl src, rscratch1 and rscratch2).
void MacroAssembler::load_nklass(Register dst, Register src) {
  assert(UseCompressedClassPointers, "expects UseCompressedClassPointers");

  if (!UseCompactObjectHeaders) {
    // Legacy layout: the narrow klass sits in its own header field.
    ldrw(dst, Address(src, oopDesc::klass_offset_in_bytes()));
    return;
  }

  Label fast;

  // Check if we can take the (common) fast path, if obj is unlocked.
  ldr(dst, Address(src, oopDesc::mark_offset_in_bytes()));
  tbz(dst, exact_log2(markWord::monitor_value), fast);

  // Fetch displaced header: the mark word points at an ObjectMonitor,
  // which holds the original header containing the klass bits.
  ldr(dst, Address(dst, OM_OFFSET_NO_MONITOR_VALUE_TAG(header)));

  // Fast-path: shift and decode Klass*.
  bind(fast);
  lsr(dst, dst, markWord::klass_shift);
}
4341 
// Load the Klass* of the object in 'src' into 'dst'. With compressed class
// pointers the narrow klass is loaded (from the mark word under compact
// object headers, else from the klass field) and then decoded; otherwise
// the full 64-bit Klass* is loaded directly.
void MacroAssembler::load_klass(Register dst, Register src) {
  if (UseCompressedClassPointers) {
    if (UseCompactObjectHeaders) {
      load_nklass(dst, src);
    } else {
      ldrw(dst, Address(src, oopDesc::klass_offset_in_bytes()));
    }
    // Objects always have a klass, so the not-null decode is safe.
    decode_klass_not_null(dst);
  } else {
    ldr(dst, Address(src, oopDesc::klass_offset_in_bytes()));
  }
}
4354 
// ((OopHandle)result).resolve();
// Dereference an OopHandle in 'result', leaving the resolved oop in
// 'result'. Uses the GC-aware access_load_at so the right barriers apply.
void MacroAssembler::resolve_oop_handle(Register result, Register tmp1, Register tmp2) {
  // OopHandle::resolve is an indirection.
  access_load_at(T_OBJECT, IN_NATIVE, result, Address(result, 0), tmp1, tmp2);
}
4360 
// ((WeakHandle)result).resolve();
// Dereference a WeakHandle in 'result' with phantom-reference semantics,
// leaving the resolved oop (or null) in 'result'.
void MacroAssembler::resolve_weak_handle(Register result, Register tmp1, Register tmp2) {
  assert_different_registers(result, tmp1, tmp2);
  Label resolved;

  // A null weak handle resolves to null.
  cbz(result, resolved);

  // Only 64 bit platforms support GCs that require a tmp register
  // WeakHandle::resolve is an indirection like jweak.
  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
                 result, Address(result), tmp1, tmp2);
  bind(resolved);
}
4375 
4376 void MacroAssembler::load_mirror(Register dst, Register method, Register tmp1, Register tmp2) {
4377   const int mirror_offset = in_bytes(Klass::java_mirror_offset());
4378   ldr(dst, Address(rmethod, Method::const_offset()));
4379   ldr(dst, Address(dst, ConstMethod::constants_offset()));
4380   ldr(dst, Address(dst, ConstantPool::pool_holder_offset_in_bytes()));
4381   ldr(dst, Address(dst, mirror_offset));
4382   resolve_oop_handle(dst, tmp1, tmp2);
4383 }
4384 
// Compare the klass of the object in 'oop' against 'trial_klass' (a full
// Klass*), setting flags for a subsequent conditional branch. 'tmp' is
// clobbered with the loaded (and possibly decoded) klass.
void MacroAssembler::cmp_klass(Register oop, Register trial_klass, Register tmp) {
  assert_different_registers(oop, trial_klass, tmp);
  if (UseCompressedClassPointers) {
    if (UseCompactObjectHeaders) {
      load_nklass(tmp, oop);
    } else {
      ldrw(tmp, Address(oop, oopDesc::klass_offset_in_bytes()));
    }
    if (CompressedKlassPointers::base() == nullptr) {
      // Zero base: compare trial_klass against the shifted narrow value
      // directly, avoiding a full decode.
      cmp(trial_klass, tmp, LSL, CompressedKlassPointers::shift());
      return;
    } else if (((uint64_t)CompressedKlassPointers::base() & 0xffffffff) == 0
               && CompressedKlassPointers::shift() == 0) {
      // Only the bottom 32 bits matter
      cmpw(trial_klass, tmp);
      return;
    }
    // General case: fully decode the narrow klass before comparing.
    decode_klass_not_null(tmp);
  } else {
    ldr(tmp, Address(oop, oopDesc::klass_offset_in_bytes()));
  }
  cmp(trial_klass, tmp);
}
4408 
4409 void MacroAssembler::store_klass(Register dst, Register src) {
4410   // FIXME: Should this be a store release?  concurrent gcs assumes
4411   // klass length is valid if klass field is not null.
4412   if (UseCompressedClassPointers) {

4541   // Cannot assert, unverified entry point counts instructions (see .ad file)
4542   // vtableStubs also counts instructions in pd_code_size_limit.
4543   // Also do not verify_oop as this is called by verify_oop.
4544   if (CompressedOops::shift() != 0) {
4545     assert(LogMinObjAlignmentInBytes == CompressedOops::shift(), "decode alg wrong");
4546     if (CompressedOops::base() != nullptr) {
4547       add(dst, rheapbase, src, Assembler::LSL, LogMinObjAlignmentInBytes);
4548     } else {
4549       add(dst, zr, src, Assembler::LSL, LogMinObjAlignmentInBytes);
4550     }
4551   } else {
4552     assert (CompressedOops::base() == nullptr, "sanity");
4553     if (dst != src) {
4554       mov(dst, src);
4555     }
4556   }
4557 }
4558 
4559 MacroAssembler::KlassDecodeMode MacroAssembler::_klass_decode_mode(KlassDecodeNone);
4560 
4561 // Returns a static string
4562 const char* MacroAssembler::describe_klass_decode_mode(MacroAssembler::KlassDecodeMode mode) {
4563   switch (mode) {
4564   case KlassDecodeNone: return "none";
4565   case KlassDecodeZero: return "zero";
4566   case KlassDecodeXor:  return "xor";
4567   case KlassDecodeMovk: return "movk";
4568   default:
4569     ShouldNotReachHere();
4570   }
4571   return NULL;
4572 }
4573 
// Return the current narrow Klass pointer decode mode, computing and
// caching it on first use from the compressed class space base.
MacroAssembler::KlassDecodeMode MacroAssembler::klass_decode_mode() {
  if (_klass_decode_mode == KlassDecodeNone) {
    // First time initialization
    assert(UseCompressedClassPointers, "not using compressed class pointers");
    assert(Metaspace::initialized(), "metaspace not initialized yet");

    _klass_decode_mode = klass_decode_mode_for_base(CompressedKlassPointers::base());
    // An unencodable base is a VM bug: metaspace reservation should have
    // rejected it during initialization.
    guarantee(_klass_decode_mode != KlassDecodeNone,
              PTR_FORMAT " is not a valid encoding base on aarch64",
              p2i(CompressedKlassPointers::base()));
    log_info(metaspace)("klass decode mode initialized: %s", describe_klass_decode_mode(_klass_decode_mode));
  }
  return _klass_decode_mode;
}
4589 
// Given an arbitrary base address, return the KlassDecodeMode that would be used. Return KlassDecodeNone
// if base address is not valid for encoding.
MacroAssembler::KlassDecodeMode MacroAssembler::klass_decode_mode_for_base(address base) {

  const uint64_t base_u64 = (uint64_t) base;

  // Zero base: decode is a bare shift.
  if (base_u64 == 0) {
    return KlassDecodeZero;
  }

  // Base encodable as an AArch64 bitmask immediate and aligned to the
  // encoding range: a single EOR can add/remove it.
  if (operand_valid_for_logical_immediate(false, base_u64) &&
      ((base_u64 & (KlassEncodingMetaspaceMax - 1)) == 0)) {
    return KlassDecodeXor;
  }

  // Shifted base confined to bits [32..47]: one MOVK materializes it.
  const uint64_t shifted_base = base_u64 >> LogKlassAlignmentInBytes;
  if ((shifted_base & 0xffff0000ffffffff) == 0) {
    return KlassDecodeMovk;
  }

  return KlassDecodeNone;
}
4612 
// Encode the Klass* in 'src' into its narrow form in 'dst' (dst may equal
// src). Inverse of decode_klass_not_null(); src must not be null.
void MacroAssembler::encode_klass_not_null(Register dst, Register src) {
  assert (UseCompressedClassPointers, "should only be used for compressed headers");
  // With compact headers the klass shift is always non-zero, so each mode
  // below can assume a shift unconditionally.
  assert(CompressedKlassPointers::shift() != 0, "not lilliput?");
  switch (klass_decode_mode()) {
  case KlassDecodeZero:
    // Zero base: encoding is just the right shift.
    lsr(dst, src, LogKlassAlignmentInBytes);
    break;

  case KlassDecodeXor:
    // XOR strips the base (its bits don't overlap the range), then shift.
    eor(dst, src, (uint64_t)CompressedKlassPointers::base());
    lsr(dst, dst, LogKlassAlignmentInBytes);
    break;

  case KlassDecodeMovk:
    // Base lives only in bits [32..47]; extracting the narrow-width field
    // after the shift discards it in one instruction.
    ubfx(dst, src, LogKlassAlignmentInBytes, MaxNarrowKlassPointerBits);
    break;

  case KlassDecodeNone:
    ShouldNotReachHere();
    break;
  }
}
4635 
// In-place variant: encode the Klass* in 'r' into its narrow form.
void MacroAssembler::encode_klass_not_null(Register r) {
  encode_klass_not_null(r, r);
}
4639 
// Decode the narrow klass in 'src' into a full Klass* in 'dst' (dst may
// equal src). src must not be null. Inverse of encode_klass_not_null().
void  MacroAssembler::decode_klass_not_null(Register dst, Register src) {
  assert (UseCompressedClassPointers, "should only be used for compressed headers");

  // With compact headers the klass shift is always non-zero, so each mode
  // below applies the shift unconditionally.
  assert(CompressedKlassPointers::shift() != 0, "not lilliput?");

  switch (klass_decode_mode()) {
  case KlassDecodeZero:
    // Zero base: the decode is done by the caller-side shift in load_nklass;
    // nothing to add here beyond moving the value if needed.
    if (dst != src) mov(dst, src);
    break;

  case KlassDecodeXor:
    // Shift up, then XOR in the base (whose bits don't overlap the range).
    lsl(dst, src, LogKlassAlignmentInBytes);
    eor(dst, dst, (uint64_t)CompressedKlassPointers::base());
    break;

  case KlassDecodeMovk: {
    const uint64_t shifted_base =
      (uint64_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();

    // Invalid base should have been gracefully handled via klass_decode_mode() in VM initialization.
    assert((shifted_base & 0xffff0000ffffffff) == 0, "incompatible base");

    // Insert the base's bits [32..47] with a single MOVK, then shift up.
    if (dst != src) movw(dst, src);
    movk(dst, shifted_base >> 32, 32);
    lsl(dst, dst, LogKlassAlignmentInBytes);
    break;
  }

  case KlassDecodeNone:
    ShouldNotReachHere();
    break;
  }
}
4673 
// In-place variant: decode the narrow klass in 'r' into a full Klass*.
void  MacroAssembler::decode_klass_not_null(Register r) {
  decode_klass_not_null(r, r);
}
4677 
4678 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
4679 #ifdef ASSERT
4680   {
4681     ThreadInVMfromUnknown tiv;
4682     assert (UseCompressedOops, "should only be used for compressed oops");
4683     assert (Universe::heap() != nullptr, "java heap should be initialized");
4684     assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
< prev index next >