5418 BLOCK_COMMENT("encode_klass_not_null {");
5419 assert_different_registers(r, tmp);
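// Encodes in place: r = (r - base) >> shift; tmp is used (and clobbered) only when a base is configured.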
5420 if (CompressedKlassPointers::base() != nullptr) {
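// When generating code for the AOT code cache, the klass-encoding base is loaded through a
// relocatable ExternalAddress rather than embedded as an immediate, so the cached code can
// pick up the base chosen by the JVM that later loads it.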
5421 if (AOTCodeCache::is_on_for_dump()) {
5422 movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
5423 } else {
5424 movptr(tmp, (intptr_t)CompressedKlassPointers::base());
5425 }
5426 subq(r, tmp);
5427 }
5428 if (CompressedKlassPointers::shift() != 0) {
5429 shrq(r, CompressedKlassPointers::shift());
5430 }
5431 BLOCK_COMMENT("} encode_klass_not_null");
5432 }
5433
5434 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
5435 BLOCK_COMMENT("encode_and_move_klass_not_null {");
5436 assert_different_registers(src, dst);
5437 if (CompressedKlassPointers::base() != nullptr) {
5438 movptr(dst, -(intptr_t)CompressedKlassPointers::base());
5439 addq(dst, src);
5440 } else {
5441 movptr(dst, src);
5442 }
5443 if (CompressedKlassPointers::shift() != 0) {
5444 shrq(dst, CompressedKlassPointers::shift());
5445 }
5446 BLOCK_COMMENT("} encode_and_move_klass_not_null");
5447 }
5448
5449 void MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
5450 BLOCK_COMMENT("decode_klass_not_null {");
5451 assert_different_registers(r, tmp);
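// Decodes in place: r = (r << shift) + base (when a base is configured); tmp is used to hold the base.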
5452 // Note: it will change flags
5453 assert(UseCompressedClassPointers, "should only be used for compressed headers");
5454 // Cannot assert, unverified entry point counts instructions (see .ad file)
5455 // vtableStubs also counts instructions in pd_code_size_limit.
5456 // Also do not verify_oop as this is called by verify_oop.
5457 if (CompressedKlassPointers::shift() != 0) {
5458 shlq(r, CompressedKlassPointers::shift());
5459 }
5460 if (CompressedKlassPointers::base() != nullptr) {
5461 if (AOTCodeCache::is_on_for_dump()) {
5462 movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
5463 } else {
5464 movptr(tmp, (intptr_t)CompressedKlassPointers::base());
5465 }
5466 addq(r, tmp);
5467 }
5468 BLOCK_COMMENT("} decode_klass_not_null");
5469 }
5470
5471 void MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
5472 BLOCK_COMMENT("decode_and_move_klass_not_null {");
5473 assert_different_registers(src, dst);
5474 // Note: it will change flags
5475 assert (UseCompressedClassPointers, "should only be used for compressed headers");
5476 // Cannot assert, unverified entry point counts instructions (see .ad file)
5477 // vtableStubs also counts instructions in pd_code_size_limit.
5478 // Also do not verify_oop as this is called by verify_oop.
5479
5480 if (CompressedKlassPointers::base() == nullptr &&
5481 CompressedKlassPointers::shift() == 0) {
5482 // The best case scenario is that there is no base or shift. Then it is already
5483 // a pointer that needs nothing but a register rename.
5484 movl(dst, src);
5485 } else {
5486 if (CompressedKlassPointers::shift() <= Address::times_8) {
5487 if (CompressedKlassPointers::base() != nullptr) {
5488 movptr(dst, (intptr_t)CompressedKlassPointers::base());
5489 } else {
5490 xorq(dst, dst);
5491 }
5492 if (CompressedKlassPointers::shift() != 0) {
5493 assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
5494 leaq(dst, Address(dst, src, Address::times_8, 0));
5495 } else {
5496 addq(dst, src);
5497 }
5498 } else {
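// Shift larger than any lea scale factor: add the pre-shifted base to the narrow value and
// shift the sum back up. This relies on the base having zero low bits at the current shift,
// otherwise those bits would be lost in base_right_shifted.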
5499 if (CompressedKlassPointers::base() != nullptr) {
5500 const intptr_t base_right_shifted =
5501 (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
5502 movptr(dst, base_right_shifted);
5503 } else {
5504 xorq(dst, dst);
5505 }
5506 addq(dst, src);
5507 shlq(dst, CompressedKlassPointers::shift());
5508 }
5509 }
5510 BLOCK_COMMENT("} decode_and_move_klass_not_null");
5511 }
5512
5513 void MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
5514 assert (UseCompressedOops, "should only be used for compressed headers");
5515 assert (Universe::heap() != nullptr, "java heap should be initialized");
5516 assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
5517 int oop_index = oop_recorder()->find_index(obj);
5518 RelocationHolder rspec = oop_Relocation::spec(oop_index);
5519 mov_narrow_oop(dst, oop_index, rspec);
5520 }
5521
5522 void MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
5563 }
5564
5565 void MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
5566 assert (UseCompressedClassPointers, "should only be used for compressed headers");
5567 assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
5568 int klass_index = oop_recorder()->find_index(k);
5569 RelocationHolder rspec = metadata_Relocation::spec(klass_index);
5570 Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
5571 }
5572
5573 void MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
5574 assert (UseCompressedClassPointers, "should only be used for compressed headers");
5575 assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
5576 int klass_index = oop_recorder()->find_index(k);
5577 RelocationHolder rspec = metadata_Relocation::spec(klass_index);
5578 Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
5579 }
5580
5581 void MacroAssembler::reinit_heapbase() {
5582 if (UseCompressedOops) {
5583 if (Universe::heap() != nullptr) {
5584 if (CompressedOops::base() == nullptr) {
5585 MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
5586 } else {
5587 mov64(r12_heapbase, (int64_t)CompressedOops::base());
5588 }
5589 } else {
5590 movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr()));
5591 }
5592 }
5593 }
5594
5595 #if COMPILER2_OR_JVMCI
5596
5597 // clear memory of size 'cnt' qwords, starting at 'base' using XMM/YMM/ZMM registers
5598 void MacroAssembler::xmm_clear_mem(Register base, Register cnt, Register rtmp, XMMRegister xtmp, KRegister mask) {
5599 // cnt - number of qwords (8-byte words).
5600 // base - start address, qword aligned.
5601 Label L_zero_64_bytes, L_loop, L_sloop, L_tail, L_end;
5602 bool use64byteVector = (MaxVectorSize == 64) && (VM_Version::avx3_threshold() == 0);
5603 if (use64byteVector) {
5604 vpxor(xtmp, xtmp, xtmp, AVX_512bit);
5605 } else if (MaxVectorSize >= 32) {
9771 // Restores legacy GPR state from the stack.
9772 void MacroAssembler::restore_legacy_gprs() {
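// Slot layout mirrors save_legacy_gprs: 16 qword slots, with the slot at 11 * wordSize
// (which would correspond to rsp) deliberately left unused.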
9773 movq(r15, Address(rsp, 0));
9774 movq(r14, Address(rsp, wordSize));
9775 movq(r13, Address(rsp, 2 * wordSize));
9776 movq(r12, Address(rsp, 3 * wordSize));
9777 movq(r11, Address(rsp, 4 * wordSize));
9778 movq(r10, Address(rsp, 5 * wordSize));
9779 movq(r9, Address(rsp, 6 * wordSize));
9780 movq(r8, Address(rsp, 7 * wordSize));
9781 movq(rdi, Address(rsp, 8 * wordSize));
9782 movq(rsi, Address(rsp, 9 * wordSize));
9783 movq(rbp, Address(rsp, 10 * wordSize));
9784 movq(rbx, Address(rsp, 12 * wordSize));
9785 movq(rdx, Address(rsp, 13 * wordSize));
9786 movq(rcx, Address(rsp, 14 * wordSize));
9787 movq(rax, Address(rsp, 15 * wordSize));
9788 addq(rsp, 16 * wordSize);
9789 }
9790
9791 void MacroAssembler::setcc(Assembler::Condition comparison, Register dst) {
9792 if (VM_Version::supports_apx_f()) {
9793 esetzucc(comparison, dst);
9794 } else {
9795 setb(comparison, dst);
9796 movzbl(dst, dst);
9797 }
9798 }
5418 BLOCK_COMMENT("encode_klass_not_null {");
5419 assert_different_registers(r, tmp);
5420 if (CompressedKlassPointers::base() != nullptr) {
5421 if (AOTCodeCache::is_on_for_dump()) {
5422 movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
5423 } else {
5424 movptr(tmp, (intptr_t)CompressedKlassPointers::base());
5425 }
5426 subq(r, tmp);
5427 }
5428 if (CompressedKlassPointers::shift() != 0) {
5429 shrq(r, CompressedKlassPointers::shift());
5430 }
5431 BLOCK_COMMENT("} encode_klass_not_null");
5432 }
5433
5434 void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
5435 BLOCK_COMMENT("encode_and_move_klass_not_null {");
5436 assert_different_registers(src, dst);
5437 if (CompressedKlassPointers::base() != nullptr) {
5438 if (AOTCodeCache::is_on_for_dump()) {
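// Load the base from its relocatable address and negate it, so the addq below
// produces src - base without disturbing src.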
5439 movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
5440 negq(dst);
5441 } else {
5442 movptr(dst, -(intptr_t)CompressedKlassPointers::base());
5443 }
5444 addq(dst, src);
5445 } else {
5446 movptr(dst, src);
5447 }
5448 if (CompressedKlassPointers::shift() != 0) {
5449 shrq(dst, CompressedKlassPointers::shift());
5450 }
5451 BLOCK_COMMENT("} encode_and_move_klass_not_null");
5452 }
5453
5454 void MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
5455 BLOCK_COMMENT("decode_klass_not_null {");
5456 assert_different_registers(r, tmp);
5457 // Note: it will change flags
5458 assert(UseCompressedClassPointers, "should only be used for compressed headers");
5459 // Cannot assert, unverified entry point counts instructions (see .ad file)
5460 // vtableStubs also counts instructions in pd_code_size_limit.
5461 // Also do not verify_oop as this is called by verify_oop.
5462 if (CompressedKlassPointers::shift() != 0) {
5463 shlq(r, CompressedKlassPointers::shift());
5464 }
5465 if (CompressedKlassPointers::base() != nullptr) {
5466 if (AOTCodeCache::is_on_for_dump()) {
5467 movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
5468 } else {
5469 movptr(tmp, (intptr_t)CompressedKlassPointers::base());
5470 }
5471 addq(r, tmp);
5472 }
5473 BLOCK_COMMENT("} decode_klass_not_null");
5474 }
5475
5476 void MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
5477 BLOCK_COMMENT("decode_and_move_klass_not_null {");
5478 assert_different_registers(src, dst);
5479 // Note: it will change flags
5480 assert (UseCompressedClassPointers, "should only be used for compressed headers");
5481 // Cannot assert, unverified entry point counts instructions (see .ad file)
5482 // vtableStubs also counts instructions in pd_code_size_limit.
5483 // Also do not verify_oop as this is called by verify_oop.
5484
5485 if (CompressedKlassPointers::base() == nullptr &&
5486 CompressedKlassPointers::shift() == 0) {
5487 // The best case scenario is that there is no base or shift. Then it is already
5488 // a pointer that needs nothing but a register rename.
5489 movptr(dst, src);
5490 } else {
5491 if (CompressedKlassPointers::shift() <= Address::times_8) {
5492 if (CompressedKlassPointers::base() != nullptr) {
5493 if (AOTCodeCache::is_on_for_dump()) {
5494 movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
5495 } else {
5496 movptr(dst, (intptr_t)CompressedKlassPointers::base());
5497 }
5498 } else {
5499 xorq(dst, dst);
5500 }
5501 if (CompressedKlassPointers::shift() != 0) {
5502 assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
5503 leaq(dst, Address(dst, src, Address::times_8, 0));
5504 } else {
5505 addq(dst, src);
5506 }
5507 } else {
5508 if (CompressedKlassPointers::base() != nullptr) {
5509 if (AOTCodeCache::is_on_for_dump()) {
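// In the AOT-dump case the base is loaded indirectly and shifted right here, rather than
// folding the shift into a compile-time immediate as in the else branch below.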
5510 movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
5511 shrq(dst, CompressedKlassPointers::shift());
5512 } else {
5513 const intptr_t base_right_shifted =
5514 (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
5515 movptr(dst, base_right_shifted);
5516 }
5517 } else {
5518 xorq(dst, dst);
5519 }
5520 addq(dst, src);
5521 shlq(dst, CompressedKlassPointers::shift());
5522 }
5523 }
5524 BLOCK_COMMENT("} decode_and_move_klass_not_null");
5525 }
5526
5527 void MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
5528 assert (UseCompressedOops, "should only be used for compressed headers");
5529 assert (Universe::heap() != nullptr, "java heap should be initialized");
5530 assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
5531 int oop_index = oop_recorder()->find_index(obj);
5532 RelocationHolder rspec = oop_Relocation::spec(oop_index);
5533 mov_narrow_oop(dst, oop_index, rspec);
5534 }
5535
5536 void MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
5577 }
5578
5579 void MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
5580 assert (UseCompressedClassPointers, "should only be used for compressed headers");
5581 assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
5582 int klass_index = oop_recorder()->find_index(k);
5583 RelocationHolder rspec = metadata_Relocation::spec(klass_index);
5584 Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
5585 }
5586
5587 void MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
5588 assert (UseCompressedClassPointers, "should only be used for compressed headers");
5589 assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
5590 int klass_index = oop_recorder()->find_index(k);
5591 RelocationHolder rspec = metadata_Relocation::spec(klass_index);
5592 Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
5593 }
5594
5595 void MacroAssembler::reinit_heapbase() {
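// r12_heapbase caches the compressed-oops base used for oop encoding/decoding; this reloads
// it after code that may have clobbered r12. When the heap is not yet initialized, or when
// dumping AOT code, the base is loaded indirectly via ExternalAddress.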
5596 if (UseCompressedOops) {
5597 if (Universe::heap() != nullptr) { // GC was initialized
5598 if (CompressedOops::base() == nullptr) {
5599 MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
5600 } else if (AOTCodeCache::is_on_for_dump()) {
5601 movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr()));
5602 } else {
5603 mov64(r12_heapbase, (int64_t)CompressedOops::base());
5604 }
5605 } else {
5606 movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr()));
5607 }
5608 }
5609 }
5610
5611 #if COMPILER2_OR_JVMCI
5612
5613 // clear memory of size 'cnt' qwords, starting at 'base' using XMM/YMM/ZMM registers
5614 void MacroAssembler::xmm_clear_mem(Register base, Register cnt, Register rtmp, XMMRegister xtmp, KRegister mask) {
5615 // cnt - number of qwords (8-byte words).
5616 // base - start address, qword aligned.
5617 Label L_zero_64_bytes, L_loop, L_sloop, L_tail, L_end;
5618 bool use64byteVector = (MaxVectorSize == 64) && (VM_Version::avx3_threshold() == 0);
5619 if (use64byteVector) {
5620 vpxor(xtmp, xtmp, xtmp, AVX_512bit);
5621 } else if (MaxVectorSize >= 32) {
9787 // Restores legacy GPR state from the stack.
9788 void MacroAssembler::restore_legacy_gprs() {
9789 movq(r15, Address(rsp, 0));
9790 movq(r14, Address(rsp, wordSize));
9791 movq(r13, Address(rsp, 2 * wordSize));
9792 movq(r12, Address(rsp, 3 * wordSize));
9793 movq(r11, Address(rsp, 4 * wordSize));
9794 movq(r10, Address(rsp, 5 * wordSize));
9795 movq(r9, Address(rsp, 6 * wordSize));
9796 movq(r8, Address(rsp, 7 * wordSize));
9797 movq(rdi, Address(rsp, 8 * wordSize));
9798 movq(rsi, Address(rsp, 9 * wordSize));
9799 movq(rbp, Address(rsp, 10 * wordSize));
9800 movq(rbx, Address(rsp, 12 * wordSize));
9801 movq(rdx, Address(rsp, 13 * wordSize));
9802 movq(rcx, Address(rsp, 14 * wordSize));
9803 movq(rax, Address(rsp, 15 * wordSize));
9804 addq(rsp, 16 * wordSize);
9805 }
9806
9807 void MacroAssembler::load_aotrc_address(Register reg, address a) {
9808 #if INCLUDE_CDS
9809 assert(AOTRuntimeConstants::contains(a), "address out of range for data area");
9810 if (AOTCodeCache::is_on_for_dump()) {
9811 // all aotrc field addresses should be registered in the AOTCodeCache address table
9812 lea(reg, ExternalAddress(a));
9813 } else {
9814 mov64(reg, (uint64_t)a);
9815 }
9816 #else
9817 ShouldNotReachHere();
9818 #endif
9819 }
9820
9821 void MacroAssembler::setcc(Assembler::Condition comparison, Register dst) {
9822 if (VM_Version::supports_apx_f()) {
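// The APX zero-upper SETcc form clears the upper bits of dst itself, so the separate
// movzbl needed on the legacy path is not required here.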
9823 esetzucc(comparison, dst);
9824 } else {
9825 setb(comparison, dst);
9826 movzbl(dst, dst);
9827 }
9828 }