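// Encode the (non-null) Klass* in r in place: subtract the narrow-klass encoding base,
// then shift right. When dumping AOT code the base is loaded from an ExternalAddress so
// the AOT code cache can relocate it; otherwise it is embedded as an immediate.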
void MacroAssembler::encode_klass_not_null(Register r, Register tmp) {
  BLOCK_COMMENT("encode_klass_not_null {");
  assert_different_registers(r, tmp);
  if (CompressedKlassPointers::base() != nullptr) {
    if (AOTCodeCache::is_on_for_dump()) {
      movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
    } else {
      movptr(tmp, (intptr_t)CompressedKlassPointers::base());
    }
    subq(r, tmp);
  }
  if (CompressedKlassPointers::shift() != 0) {
    shrq(r, CompressedKlassPointers::shift());
  }
  BLOCK_COMMENT("} encode_klass_not_null");
}

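// Same encoding as encode_klass_not_null, but the result is produced in dst and src is
// left untouched. The base is negated so a single addq performs the subtraction.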
void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
  BLOCK_COMMENT("encode_and_move_klass_not_null {");
  assert_different_registers(src, dst);
  if (CompressedKlassPointers::base() != nullptr) {
    if (AOTCodeCache::is_on_for_dump()) {
      movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
      negq(dst);
    } else {
      movptr(dst, -(intptr_t)CompressedKlassPointers::base());
    }
    addq(dst, src);
  } else {
    movptr(dst, src);
  }
  if (CompressedKlassPointers::shift() != 0) {
    shrq(dst, CompressedKlassPointers::shift());
  }
  BLOCK_COMMENT("} encode_and_move_klass_not_null");
}

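// Inverse of encode_klass_not_null: shift the narrow klass in r back up and add the
// encoding base (materialized in tmp) to recover the full Klass*.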
void MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
  BLOCK_COMMENT("decode_klass_not_null {");
  assert_different_registers(r, tmp);
  // Note: it will change flags
  assert(UseCompressedClassPointers, "should only be used for compressed headers");
  // Cannot assert, unverified entry point counts instructions (see .ad file)
  // vtableStubs also counts instructions in pd_code_size_limit.
  // Also do not verify_oop as this is called by verify_oop.
  if (CompressedKlassPointers::shift() != 0) {
    shlq(r, CompressedKlassPointers::shift());
  }
  if (CompressedKlassPointers::base() != nullptr) {
    if (AOTCodeCache::is_on_for_dump()) {
      movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
    } else {
      movptr(tmp, (intptr_t)CompressedKlassPointers::base());
    }
    addq(r, tmp);
  }
  BLOCK_COMMENT("} decode_klass_not_null");
}

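// Decode the narrow klass in src into dst without modifying src. When the shift fits a
// scaled addressing mode the base and the shifted value are combined with a single leaq;
// for larger shifts the right-shifted base is added first and the sum is shifted up.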
void MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src) {
  BLOCK_COMMENT("decode_and_move_klass_not_null {");
  assert_different_registers(src, dst);
  // Note: it will change flags
  assert (UseCompressedClassPointers, "should only be used for compressed headers");
  // Cannot assert, unverified entry point counts instructions (see .ad file)
  // vtableStubs also counts instructions in pd_code_size_limit.
  // Also do not verify_oop as this is called by verify_oop.

  if (CompressedKlassPointers::base() == nullptr &&
      CompressedKlassPointers::shift() == 0) {
    // The best case scenario is that there is no base or shift. Then it is already
    // a pointer that needs nothing but a register rename.
    movptr(dst, src);
  } else {
    if (CompressedKlassPointers::shift() <= Address::times_8) {
      if (CompressedKlassPointers::base() != nullptr) {
        if (AOTCodeCache::is_on_for_dump()) {
          movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
        } else {
          movptr(dst, (intptr_t)CompressedKlassPointers::base());
        }
      } else {
        xorq(dst, dst);
      }
      if (CompressedKlassPointers::shift() != 0) {
        assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
        leaq(dst, Address(dst, src, Address::times_8, 0));
      } else {
        addq(dst, src);
      }
    } else {
      if (CompressedKlassPointers::base() != nullptr) {
        if (AOTCodeCache::is_on_for_dump()) {
          movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
          shrq(dst, CompressedKlassPointers::shift());
        } else {
          const intptr_t base_right_shifted =
              (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
          movptr(dst, base_right_shifted);
        }
      } else {
        xorq(dst, dst);
      }
      addq(dst, src);
      shlq(dst, CompressedKlassPointers::shift());
    }
  }
  BLOCK_COMMENT("} decode_and_move_klass_not_null");
}

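// Materialize the narrow (compressed) oop for obj in dst, recording an oop relocation
// so the embedded value can be updated if the object moves.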
void MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
  assert (UseCompressedOops, "should only be used for compressed headers");
  assert (Universe::heap() != nullptr, "java heap should be initialized");
  assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
  int oop_index = oop_recorder()->find_index(obj);
  RelocationHolder rspec = oop_Relocation::spec(oop_index);
  mov_narrow_oop(dst, oop_index, rspec);
}

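// Same as above, but the narrow oop is stored to a memory operand.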
void MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
  assert (UseCompressedOops, "should only be used for compressed headers");
  assert (Universe::heap() != nullptr, "java heap should be initialized");
  assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
  int oop_index = oop_recorder()->find_index(obj);
  RelocationHolder rspec = oop_Relocation::spec(oop_index);
  mov_narrow_oop(dst, oop_index, rspec);
}

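// Compare a narrow klass held in a register (or in memory, below) against the statically
// encoded narrow klass of k, attaching metadata relocation info to the immediate.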
void MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
  assert (UseCompressedClassPointers, "should only be used for compressed headers");
  assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
  int klass_index = oop_recorder()->find_index(k);
  RelocationHolder rspec = metadata_Relocation::spec(klass_index);
  Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
}

void MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
  assert (UseCompressedClassPointers, "should only be used for compressed headers");
  assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
  int klass_index = oop_recorder()->find_index(k);
  RelocationHolder rspec = metadata_Relocation::spec(klass_index);
  Assembler::cmp_narrow_oop(dst, CompressedKlassPointers::encode(k), rspec);
}

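// Reload r12_heapbase with the compressed-oops base. Before the heap is initialized,
// and when dumping AOT code, the base is loaded through its external address so the
// value can be relocated; otherwise the known base (or zero) is materialized directly.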
void MacroAssembler::reinit_heapbase() {
  if (UseCompressedOops) {
    if (Universe::heap() != nullptr) { // GC was initialized
      if (CompressedOops::base() == nullptr) {
        MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
      } else if (AOTCodeCache::is_on_for_dump()) {
        movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr()));
      } else {
        mov64(r12_heapbase, (int64_t)CompressedOops::base());
      }
    } else {
      movptr(r12_heapbase, ExternalAddress(CompressedOops::base_addr()));
    }
  }
}

#if COMPILER2_OR_JVMCI

// clear memory of size 'cnt' qwords, starting at 'base' using XMM/YMM/ZMM registers
void MacroAssembler::xmm_clear_mem(Register base, Register cnt, Register rtmp, XMMRegister xtmp, KRegister mask) {
  // cnt - number of qwords (8-byte words).
  // base - start address, qword aligned.
  Label L_zero_64_bytes, L_loop, L_sloop, L_tail, L_end;
  bool use64byteVector = (MaxVectorSize == 64) && (VM_Version::avx3_threshold() == 0);
  if (use64byteVector) {
    vpxor(xtmp, xtmp, xtmp, AVX_512bit);
  } else if (MaxVectorSize >= 32) {
// Restores legacy GPR state from the stack.
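// (The skipped 11 * wordSize slot presumably corresponds to rsp in the matching save
// layout; rsp itself is recovered by the final addq rather than reloaded.)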
void MacroAssembler::restore_legacy_gprs() {
  movq(r15, Address(rsp, 0));
  movq(r14, Address(rsp, wordSize));
  movq(r13, Address(rsp, 2 * wordSize));
  movq(r12, Address(rsp, 3 * wordSize));
  movq(r11, Address(rsp, 4 * wordSize));
  movq(r10, Address(rsp, 5 * wordSize));
  movq(r9, Address(rsp, 6 * wordSize));
  movq(r8, Address(rsp, 7 * wordSize));
  movq(rdi, Address(rsp, 8 * wordSize));
  movq(rsi, Address(rsp, 9 * wordSize));
  movq(rbp, Address(rsp, 10 * wordSize));
  movq(rbx, Address(rsp, 12 * wordSize));
  movq(rdx, Address(rsp, 13 * wordSize));
  movq(rcx, Address(rsp, 14 * wordSize));
  movq(rax, Address(rsp, 15 * wordSize));
  addq(rsp, 16 * wordSize);
}

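// Load the address of an AOT runtime constants (aotrc) field into reg. When dumping AOT
// code the address is emitted as a relocatable ExternalAddress; otherwise it is a plain
// 64-bit immediate. Only meaningful in CDS builds.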
void MacroAssembler::load_aotrc_address(Register reg, address a) {
#if INCLUDE_CDS
  assert(AOTRuntimeConstants::contains(a), "address out of range for data area");
  if (AOTCodeCache::is_on_for_dump()) {
    // all aotrc field addresses should be registered in the AOTCodeCache address table
    lea(reg, ExternalAddress(a));
  } else {
    mov64(reg, (uint64_t)a);
  }
#else
  ShouldNotReachHere();
#endif
}

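// Materialize the given condition as 0 or 1 in dst. On APX-capable CPUs esetzucc already
// zeroes the rest of dst; otherwise setb writes the low byte and movzbl zero-extends it.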
void MacroAssembler::setcc(Assembler::Condition comparison, Register dst) {
  if (VM_Version::supports_apx_f()) {
    esetzucc(comparison, dst);
  } else {
    setb(comparison, dst);
    movzbl(dst, dst);
  }
}