src/hotspot/cpu/x86/gc/shenandoah/shenandoahBarrierSetAssembler_x86.cpp

 384   __ testb(gc_state, ShenandoahHeap::HAS_FORWARDED);
 385   __ jccb(Assembler::zero, done);
 386 
 387   if (dst != rax) {
 388     __ xchgptr(dst, rax); // Move obj into rax and save rax into obj.
 389   }
 390 
 391   __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, ShenandoahBarrierSetAssembler::shenandoah_lrb())));
 392 
 393   if (dst != rax) {
 394     __ xchgptr(rax, dst); // Swap back obj with rax.
 395   }
 396 
 397   __ bind(done);
 398 
 399 #ifndef _LP64
 400   __ pop(thread);
 401 #endif
 402 }
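For orientation, the guard above boils down to a single flag test on the per-thread gc_state byte; the stub call is only taken when forwarded objects may exist. A minimal standalone sketch, with a stand-in constant for ShenandoahHeap::HAS_FORWARDED (the real value is defined elsewhere in the Shenandoah sources):

#include <cstdint>

// Stand-in for ShenandoahHeap::HAS_FORWARDED; illustrative value only.
const uint8_t HAS_FORWARDED_STANDIN = 1;

// The barrier falls through to 'done' unless the gc_state flag says that
// forwarded objects may exist; only then is the shenandoah_lrb stub called.
inline bool lrb_guard_sketch(uint8_t gc_state) {
  return (gc_state & HAS_FORWARDED_STANDIN) != 0;
}

The xchgptr pairs implement the stub's calling convention: the object is passed and returned in rax, so a different dst is swapped with rax before the call and swapped back afterwards.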
 403 
 404 void ShenandoahBarrierSetAssembler::load_reference_barrier_native(MacroAssembler* masm, Register dst) {
 405   if (!ShenandoahLoadRefBarrier) {
 406     return;
 407   }
 408 
 409   Label done;
 410   Label not_null;
 411   Label slow_path;
 412 
 413   // null check
 414   __ testptr(dst, dst);
 415   __ jcc(Assembler::notZero, not_null);
 416   __ jmp(done);
 417   __ bind(not_null);
 418 
 419 
 420 #ifdef _LP64
 421   Register thread = r15_thread;
 422 #else
 423   Register thread = rcx;


 771   Register pre_val_reg = stub->pre_val()->as_register();
 772 
 773   if (stub->do_load()) {
 774     ce->mem2reg(stub->addr(), stub->pre_val(), T_OBJECT, stub->patch_code(), stub->info(), false /*wide*/, false /*unaligned*/);
 775   }
 776 
 777   __ cmpptr(pre_val_reg, (int32_t)NULL_WORD);
 778   __ jcc(Assembler::equal, *stub->continuation());
 779   ce->store_parameter(stub->pre_val()->as_register(), 0);
 780   __ call(RuntimeAddress(bs->pre_barrier_c1_runtime_code_blob()->code_begin()));
 781   __ jmp(*stub->continuation());
 782 
 783 }
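In outline, this stub call is: load the previous value if the stub asked for it, skip the runtime call when that value is null, otherwise pass it as parameter 0 to the pre-barrier blob and resume at the continuation. A sketch under that reading, with a hypothetical callback standing in for the runtime blob:

// Illustrative only: null previous values are filtered out before the runtime
// call; non-null values are handed to the SATB pre-barrier runtime code.
inline void pre_barrier_stub_sketch(void* pre_val, void (*runtime_blob)(void*)) {
  if (pre_val == nullptr) return;   // jcc(equal, *stub->continuation())
  runtime_blob(pre_val);            // store_parameter(pre_val, 0); call(...)
}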
 784 
 785 void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub) {
 786   ShenandoahBarrierSetC1* bs = (ShenandoahBarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
 787   __ bind(*stub->entry());
 788 
 789   Register obj = stub->obj()->as_register();
 790   Register res = stub->result()->as_register();
 791   Register tmp1 = stub->tmp1()->as_register();
 792   Register tmp2 = stub->tmp2()->as_register();
 793 
 794   Label slow_path;
 795 
 796   assert(res == rax, "result must arrive in rax");
 797 
 798   if (res != obj) {
 799     __ mov(res, obj);
 800   }
 801 
 802   // Check for null.
 803   __ testptr(res, res);
 804   __ jcc(Assembler::zero, *stub->continuation());
 805 
 806   // Check for object being in the collection set.
 807   __ mov(tmp1, res);
 808   __ shrptr(tmp1, ShenandoahHeapRegion::region_size_bytes_shift_jint());
 809   __ movptr(tmp2, (intptr_t) ShenandoahHeap::in_cset_fast_test_addr());
 810 #ifdef _LP64
 811   __ movbool(tmp2, Address(tmp2, tmp1, Address::times_1));
 812   __ testbool(tmp2);
 813 #else
 814   // On x86_32, C1 register allocator can give us the register without 8-bit support.
 815   // Do the full-register access and test to avoid compilation failures.
 816   __ movptr(tmp2, Address(tmp2, tmp1, Address::times_1));
 817   __ testptr(tmp2, 0xFF);
 818 #endif
 819   __ jcc(Assembler::zero, *stub->continuation());
 820 
 821   // Test if object is resolved.
 822   __ movptr(tmp1, Address(res, oopDesc::mark_offset_in_bytes()));
 823   // Test if both lowest bits are set. We trick it by negating the bits
 824   // then test for both bits clear.
 825   __ notptr(tmp1);
 826 #ifdef _LP64
 827   __ testb(tmp1, markOopDesc::marked_value);
 828 #else
 829   // On x86_32, C1 register allocator can give us the register without 8-bit support.
 830   // Do the full-register access and test to avoid compilation failures.
 831   __ testptr(tmp1, markOopDesc::marked_value);
 832 #endif
 833   __ jccb(Assembler::notZero, slow_path);
 834   // Clear both lower bits. It's still inverted, so set them, and then invert back.
 835   __ orptr(tmp1, markOopDesc::marked_value);
 836   __ notptr(tmp1);
 837   // At this point, tmp1 contains the decoded forwarding pointer.
 838   __ mov(res, tmp1);
 839 
 840   __ jmp(*stub->continuation());
 841 
 842   __ bind(slow_path);
 843   ce->store_parameter(res, 0);
 844   __ call(RuntimeAddress(bs->load_reference_barrier_rt_code_blob()->code_begin()));
 845 
 846   __ jmp(*stub->continuation());
 847 }
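Read end to end, the fast path of this stub follows the shape sketched below. The constants are stand-ins for the HotSpot values used above (the region size shift, the collection-set byte map at ShenandoahHeap::in_cset_fast_test_addr(), and markOopDesc::marked_value), and the helper itself is illustrative only:

#include <cstdint>

const int       REGION_SHIFT_STANDIN = 19;  // stand-in for ShenandoahHeapRegion::region_size_bytes_shift_jint()
const uintptr_t MARKED_STANDIN       = 3;   // stand-in for markOopDesc::marked_value (both low mark bits)

// in_cset stands in for the byte map at ShenandoahHeap::in_cset_fast_test_addr():
// one byte per heap region, non-zero when that region is in the collection set.
inline void* lrb_stub_sketch(void* obj, const uint8_t* in_cset,
                             uintptr_t (*mark_of)(void*), void* (*slow_path)(void*)) {
  if (obj == nullptr) return obj;                                       // null filter
  if (in_cset[(uintptr_t)obj >> REGION_SHIFT_STANDIN] == 0) return obj; // not in cset
  uintptr_t mark = mark_of(obj);                                        // object's mark word
  if ((mark & MARKED_STANDIN) == MARKED_STANDIN) {                      // already forwarded?
    return (void*)(mark & ~MARKED_STANDIN);                             // decode forwarding pointer
  }
  return slow_path(obj);                                                // unresolved: runtime stub
}

The notptr/testb/orptr/notptr sequence is a register-economical way of doing the same two steps: check that both low mark bits are set, then clear them to recover the forwardee address.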
 848 
 849 #undef __
 850 
 851 #define __ sasm->
 852 
 853 void ShenandoahBarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
 854   __ prologue("shenandoah_pre_barrier", false);
 855   // arg0 : previous value of memory
 856 
 857   __ push(rax);
 858   __ push(rdx);
 859 
 860   const Register pre_val = rax;
 861   const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
 862   const Register tmp = rdx;
 863 


 894 
 895   // load the pre-value
 896   __ load_parameter(0, rcx);
 897   __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::write_ref_field_pre_entry), rcx, thread);
 898 
 899   __ restore_live_registers(true);
 900 
 901   __ bind(done);
 902 
 903   __ pop(rdx);
 904   __ pop(rax);
 905 
 906   __ epilogue();
 907 }
 908 
 909 void ShenandoahBarrierSetAssembler::generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm) {
 910   __ prologue("shenandoah_load_reference_barrier", false);
 911   // arg0 : object to be resolved
 912 
 913   __ save_live_registers_no_oop_map(true);
 914   __ load_parameter(0, LP64_ONLY(c_rarg0) NOT_LP64(rax));
 915   __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier), LP64_ONLY(c_rarg0) NOT_LP64(rax));
 916   __ restore_live_registers_except_rax(true);
 917 
 918   __ epilogue();
 919 }
 920 
 921 #undef __
 922 
 923 #endif // COMPILER1
 924 
 925 address ShenandoahBarrierSetAssembler::shenandoah_lrb() {
 926   assert(_shenandoah_lrb != NULL, "need load reference barrier stub");
 927   return _shenandoah_lrb;
 928 }
 929 
 930 #define __ cgen->assembler()->
 931 
 932 address ShenandoahBarrierSetAssembler::generate_shenandoah_lrb(StubCodeGenerator* cgen) {
 933   __ align(CodeEntryAlignment);
 934   StubCodeMark mark(cgen, "StubRoutines", "shenandoah_lrb");
 935   address start = __ pc();




 384   __ testb(gc_state, ShenandoahHeap::HAS_FORWARDED);
 385   __ jccb(Assembler::zero, done);
 386 
 387   if (dst != rax) {
 388     __ xchgptr(dst, rax); // Move obj into rax and save rax into obj.
 389   }
 390 
 391   __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, ShenandoahBarrierSetAssembler::shenandoah_lrb())));
 392 
 393   if (dst != rax) {
 394     __ xchgptr(rax, dst); // Swap back obj with rax.
 395   }
 396 
 397   __ bind(done);
 398 
 399 #ifndef _LP64
 400   __ pop(thread);
 401 #endif
 402 }
 403 
 404 // ((WeakHandle)result).peek();
 405 void ShenandoahBarrierSetAssembler::peek_weak_handle(MacroAssembler* masm, Register rresult, Register rtmp) {
 406   assert_different_registers(rresult, rtmp);
 407   Label resolved;
 408 
 409   // A null weak handle resolves to null.
 410   __ cmpptr(rresult, 0);
 411   __ jcc(Assembler::equal, resolved);
 412 
 413   // Only 64 bit platforms support GCs that require a tmp register
 414   // Only IN_HEAP loads require a thread_tmp register
 415   // WeakHandle::peek is an indirection like jweak.
 416   __ access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE,
 417                  rresult, Address(rresult, 0), rtmp, /*tmp_thread*/noreg);
 418   __ bind(resolved);
 419 }
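Conceptually this is a null-checked single indirection, as in the sketch below; the real load goes through access_load_at with the IN_NATIVE | ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE decorators, which the sketch cannot express:

// Illustrative only: a null weak handle resolves to null; otherwise the handle
// cell is dereferenced once to peek at the referent without keeping it alive.
template <typename T>
T* peek_weak_handle_sketch(T** handle) {
  return (handle == nullptr) ? nullptr : *handle;
}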
 420 
 421 void ShenandoahBarrierSetAssembler::c2i_entry_barrier(MacroAssembler* masm) {
 422   BarrierSetNMethod* bs = BarrierSet::barrier_set()->barrier_set_nmethod();
 423   if (bs == NULL) {
 424     return;
 425   }
 426 
 427   Label bad_call;
 428   __ cmpptr(rbx, 0); // rbx contains the incoming method for c2i adapters.
 429   __ jcc(Assembler::equal, bad_call);
 430 
 431   // Pointer chase to the method holder to find out if the method is concurrently unloading.
 432   Label method_live;
 433   __ load_method_holder_cld(rscratch1, rbx);
 434 
 435   // Is it a strong CLD?
 436   __ movl(rscratch2, Address(rscratch1, ClassLoaderData::keep_alive_offset()));
 437   __ cmpptr(rscratch2, 0);
 438   __ jcc(Assembler::greater, method_live);
 439 
 440   // Is it a weak but alive CLD?
 441   __ movptr(rscratch1, Address(rscratch1, ClassLoaderData::holder_offset()));
 442   peek_weak_handle(masm, rscratch1, rscratch2);
 443   __ cmpptr(rscratch1, 0);
 444   __ jcc(Assembler::notEqual, method_live);
 445 
 446   __ bind(bad_call);
 447   __ jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub()));
 448   __ bind(method_live);
 449 }
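The liveness decision above is: a null incoming method (rbx) is a bad call; otherwise the method is live if its holder's class loader data is strong (keep_alive greater than zero), or if the CLD's weak holder handle still peeks to a non-null object; anything else is redirected to the wrong-method stub. A hedged sketch of that decision, with a hypothetical struct standing in for the two ClassLoaderData fields read above:

#include <cstdint>

// Hypothetical view of the fields the barrier reads via keep_alive_offset()
// and holder_offset(); for illustration only.
struct CldViewSketch {
  int32_t keep_alive;   // > 0 for a strong CLD
  void**  holder;       // weak handle to the CLD holder (may be null or cleared)
};

inline bool method_holder_is_live_sketch(const CldViewSketch& cld) {
  if (cld.keep_alive > 0) return true;                      // strong CLD: always live
  return cld.holder != nullptr && *cld.holder != nullptr;   // weak but still reachable
}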
 450 
 451 void ShenandoahBarrierSetAssembler::load_reference_barrier_native(MacroAssembler* masm, Register dst) {
 452   if (!ShenandoahLoadRefBarrier) {
 453     return;
 454   }
 455 
 456   Label done;
 457   Label not_null;
 458   Label slow_path;
 459 
 460   // null check
 461   __ testptr(dst, dst);
 462   __ jcc(Assembler::notZero, not_null);
 463   __ jmp(done);
 464   __ bind(not_null);
 465 
 466 
 467 #ifdef _LP64
 468   Register thread = r15_thread;
 469 #else
 470   Register thread = rcx;


 818   Register pre_val_reg = stub->pre_val()->as_register();
 819 
 820   if (stub->do_load()) {
 821     ce->mem2reg(stub->addr(), stub->pre_val(), T_OBJECT, stub->patch_code(), stub->info(), false /*wide*/, false /*unaligned*/);
 822   }
 823 
 824   __ cmpptr(pre_val_reg, (int32_t)NULL_WORD);
 825   __ jcc(Assembler::equal, *stub->continuation());
 826   ce->store_parameter(stub->pre_val()->as_register(), 0);
 827   __ call(RuntimeAddress(bs->pre_barrier_c1_runtime_code_blob()->code_begin()));
 828   __ jmp(*stub->continuation());
 829 
 830 }
 831 
 832 void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub) {
 833   ShenandoahBarrierSetC1* bs = (ShenandoahBarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
 834   __ bind(*stub->entry());
 835 
 836   Register obj = stub->obj()->as_register();
 837   Register res = stub->result()->as_register();
 838   Register addr = stub->addr()->as_register();
 839   Register tmp1 = stub->tmp1()->as_register();
 840   Register tmp2 = stub->tmp2()->as_register();
 841   assert_different_registers(obj, res, addr, tmp1, tmp2);
 842 
 843   Label slow_path;
 844 
 845   assert(res == rax, "result must arrive in rax");
 846 
 847   if (res != obj) {
 848     __ mov(res, obj);
 849   }
 850 
 851   // Check for null.
 852   __ testptr(res, res);
 853   __ jcc(Assembler::zero, *stub->continuation());
 854 
 855   // Check for object being in the collection set.
 856   __ mov(tmp1, res);
 857   __ shrptr(tmp1, ShenandoahHeapRegion::region_size_bytes_shift_jint());
 858   __ movptr(tmp2, (intptr_t) ShenandoahHeap::in_cset_fast_test_addr());
 859 #ifdef _LP64
 860   __ movbool(tmp2, Address(tmp2, tmp1, Address::times_1));
 861   __ testbool(tmp2);
 862 #else
 863   // On x86_32, C1 register allocator can give us the register without 8-bit support.
 864   // Do the full-register access and test to avoid compilation failures.
 865   __ movptr(tmp2, Address(tmp2, tmp1, Address::times_1));
 866   __ testptr(tmp2, 0xFF);
 867 #endif
 868   __ jcc(Assembler::zero, *stub->continuation());
 869 
 870   __ bind(slow_path);
 871   ce->store_parameter(res, 0);
 872   ce->store_parameter(addr, 1);
 873   __ call(RuntimeAddress(bs->load_reference_barrier_rt_code_blob()->code_begin()));
 874 
 875   __ jmp(*stub->continuation());
 876 }
 877 
 878 #undef __
 879 
 880 #define __ sasm->
 881 
 882 void ShenandoahBarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
 883   __ prologue("shenandoah_pre_barrier", false);
 884   // arg0 : previous value of memory
 885 
 886   __ push(rax);
 887   __ push(rdx);
 888 
 889   const Register pre_val = rax;
 890   const Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread);
 891   const Register tmp = rdx;
 892 


 923 
 924   // load the pre-value
 925   __ load_parameter(0, rcx);
 926   __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::write_ref_field_pre_entry), rcx, thread);
 927 
 928   __ restore_live_registers(true);
 929 
 930   __ bind(done);
 931 
 932   __ pop(rdx);
 933   __ pop(rax);
 934 
 935   __ epilogue();
 936 }
 937 
 938 void ShenandoahBarrierSetAssembler::generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm) {
 939   __ prologue("shenandoah_load_reference_barrier", false);
 940   // arg0 : object to be resolved
 941 
 942   __ save_live_registers_no_oop_map(true);
 943 
 944 #ifdef _LP64
 945   __ load_parameter(0, c_rarg0);
 946   __ load_parameter(1, c_rarg1);
 947   if (UseCompressedOops) {
 948     __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_fixup_narrow), c_rarg0, c_rarg1);
 949   } else {
 950     __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_fixup), c_rarg0, c_rarg1);
 951   }
 952 #else
 953   __ load_parameter(0, rax);
 954   __ load_parameter(1, rbx);
 955   __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_fixup), rax, rbx);
 956 #endif
 957 
 958   __ restore_live_registers_except_rax(true);
 959 
 960   __ epilogue();
 961 }
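The signatures of the two ShenandoahRuntime entries are not visible in this file; from the call sites one can only assume they take the object and the address it was loaded from, with the narrow variant chosen when the heap uses compressed oops. A purely hypothetical sketch of that dispatch shape:

// Hypothetical stand-ins for the fixup entries called above; illustrative only.
typedef void* (*LrbFixupFn)(void* obj, void* load_addr);

inline void* lrb_fixup_dispatch_sketch(bool use_compressed_oops, void* obj, void* load_addr,
                                       LrbFixupFn fixup_wide, LrbFixupFn fixup_narrow) {
  // c_rarg0/c_rarg1 (or rax/rbx on 32-bit) carry obj and load_addr into the runtime;
  // the narrow entry handles slots that store compressed oops.
  return use_compressed_oops ? fixup_narrow(obj, load_addr) : fixup_wide(obj, load_addr);
}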
 962 
 963 #undef __
 964 
 965 #endif // COMPILER1
 966 
 967 address ShenandoahBarrierSetAssembler::shenandoah_lrb() {
 968   assert(_shenandoah_lrb != NULL, "need load reference barrier stub");
 969   return _shenandoah_lrb;
 970 }
 971 
 972 #define __ cgen->assembler()->
 973 
 974 address ShenandoahBarrierSetAssembler::generate_shenandoah_lrb(StubCodeGenerator* cgen) {
 975   __ align(CodeEntryAlignment);
 976   StubCodeMark mark(cgen, "StubRoutines", "shenandoah_lrb");
 977   address start = __ pc();