src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp

 427   __ str(zr, Address(rthread, JavaThread::exception_oop_offset()));
 428   __ str(zr, Address(rthread, JavaThread::exception_pc_offset()));
 429 
 430   __ bind(_unwind_handler_entry);
 431   __ verify_not_null_oop(r0);
 432   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 433     __ mov(r19, r0);  // Preserve the exception
 434   }
 435 
 436   // Perform needed unlocking
 437   MonitorExitStub* stub = NULL;
 438   if (method()->is_synchronized()) {
 439     monitor_address(0, FrameMap::r0_opr);
 440     stub = new MonitorExitStub(FrameMap::r0_opr, true, 0);
 441     if (UseHeavyMonitors) {
 442       __ b(*stub->entry());
 443     } else {
 444       __ unlock_object(r5, r4, r0, *stub->entry());
 445     }
 446     __ bind(*stub->continuation());
 447   }
 448 
 449   if (compilation()->env()->dtrace_method_probes()) {
 450     __ mov(c_rarg0, rthread);
 451     __ mov_metadata(c_rarg1, method()->constant_encoding());
 452     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), c_rarg0, c_rarg1);
 453   }
 454 
 455   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 456     __ mov(r0, r19);  // Restore the exception
 457   }
 458 
 459   // remove the activation and dispatch to the unwind handler
 460   __ block_comment("remove_frame and dispatch to the unwind handler");
 461   __ remove_frame(initial_frame_size_in_bytes());
 462   __ far_jump(RuntimeAddress(Runtime1::entry_for(Runtime1::unwind_exception_id)));
 463 
 464   // Emit the slow path assembly
 465   if (stub != NULL) {
 466     stub->emit_code(this);

2031     __ mov(dst->as_register(), (uint64_t)-1L);
2032     __ br(Assembler::LT, done);
2033     __ csinc(dst->as_register(), zr, zr, Assembler::EQ);
2034     __ bind(done);
2035   } else {
2036     ShouldNotReachHere();
2037   }
2038 }
2039 
2040 
2041 void LIR_Assembler::align_call(LIR_Code code) {  }
2042 
2043 
2044 void LIR_Assembler::call(LIR_OpJavaCall* op, relocInfo::relocType rtype) {
2045   address call = __ trampoline_call(Address(op->addr(), rtype));
2046   if (call == NULL) {
2047     bailout("trampoline stub overflow");
2048     return;
2049   }
2050   add_call_info(code_offset(), op->info());
2051 }
2052 
2053 
2054 void LIR_Assembler::ic_call(LIR_OpJavaCall* op) {
2055   address call = __ ic_call(op->addr());
2056   if (call == NULL) {
2057     bailout("trampoline stub overflow");
2058     return;
2059   }
2060   add_call_info(code_offset(), op->info());
2061 }
2062 
2063 void LIR_Assembler::emit_static_call_stub() {
2064   address call_pc = __ pc();
2065   address stub = __ start_a_stub(call_stub_size());
2066   if (stub == NULL) {
2067     bailout("static call stub overflow");
2068     return;
2069   }
2070 
2071   int start = __ offset();
2072 
2073   __ relocate(static_stub_Relocation::spec(call_pc));
2074   __ emit_static_call_stub();
2075 
2076   assert(__ offset() - start + CompiledStaticCall::to_trampoline_stub_size()
2077         <= call_stub_size(), "stub too big");
2078   __ end_a_stub();
2079 }
2080 

2565 void LIR_Assembler::emit_lock(LIR_OpLock* op) {
2566   Register obj = op->obj_opr()->as_register();  // may not be an oop
2567   Register hdr = op->hdr_opr()->as_register();
2568   Register lock = op->lock_opr()->as_register();
2569   if (UseHeavyMonitors) {
2570     __ b(*op->stub()->entry());
2571   } else if (op->code() == lir_lock) {
2572     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2573     // add debug info for NullPointerException only if one is possible
2574     int null_check_offset = __ lock_object(hdr, obj, lock, *op->stub()->entry());
2575     if (op->info() != NULL) {
2576       add_debug_info_for_null_check(null_check_offset, op->info());
2577     }
2578     // done
2579   } else if (op->code() == lir_unlock) {
2580     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2581     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
2582   } else {
2583     Unimplemented();
2584   }
2585   __ bind(*op->stub()->continuation());
2586 }
2587 
2588 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2589   Register obj = op->obj()->as_pointer_register();
2590   Register result = op->result_opr()->as_pointer_register();
2591 
2592   CodeEmitInfo* info = op->info();
2593   if (info != NULL) {
2594     add_debug_info_for_null_check_here(info);
2595   }
2596 
2597   if (UseCompressedClassPointers) {
2598     __ ldrw(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2599     __ decode_klass_not_null(result);
2600   } else {
2601     __ ldr(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2602   }
2603 }
2604 
2605 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {

2893   }
2894 
2895   __ lea(dest->as_register_lo(), as_Address(addr->as_address_ptr()));
2896 }
2897 
2898 
2899 void LIR_Assembler::rt_call(LIR_Opr result, address dest, const LIR_OprList* args, LIR_Opr tmp, CodeEmitInfo* info) {
2900   assert(!tmp->is_valid(), "don't need temporary");
2901 
2902   CodeBlob *cb = CodeCache::find_blob(dest);
2903   if (cb) {
2904     __ far_call(RuntimeAddress(dest));
2905   } else {
2906     __ mov(rscratch1, RuntimeAddress(dest));
2907     __ blr(rscratch1);
2908   }
2909 
2910   if (info != NULL) {
2911     add_call_info_here(info);
2912   }
2913 }
2914 
2915 void LIR_Assembler::volatile_move_op(LIR_Opr src, LIR_Opr dest, BasicType type, CodeEmitInfo* info) {
2916   if (dest->is_address() || src->is_address()) {
2917     move_op(src, dest, type, lir_patch_none, info,
2918             /*pop_fpu_stack*/false, /*wide*/false);
2919   } else {
2920     ShouldNotReachHere();
2921   }
2922 }
2923 
2924 #ifdef ASSERT
2925 // emit run-time assertion
2926 void LIR_Assembler::emit_assert(LIR_OpAssert* op) {
2927   assert(op->code() == lir_assert, "must be");
2928 
2929   if (op->in_opr1()->is_valid()) {
2930     assert(op->in_opr2()->is_valid(), "both operands must be valid");
2931     comp_op(op->condition(), op->in_opr1(), op->in_opr2(), op);
2932   } else {

 427   __ str(zr, Address(rthread, JavaThread::exception_oop_offset()));
 428   __ str(zr, Address(rthread, JavaThread::exception_pc_offset()));
 429 
 430   __ bind(_unwind_handler_entry);
 431   __ verify_not_null_oop(r0);
 432   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 433     __ mov(r19, r0);  // Preserve the exception
 434   }
 435 
 436   // Perform needed unlocking
 437   MonitorExitStub* stub = NULL;
 438   if (method()->is_synchronized()) {
 439     monitor_address(0, FrameMap::r0_opr);
 440     stub = new MonitorExitStub(FrameMap::r0_opr, true, 0);
 441     if (UseHeavyMonitors) {
 442       __ b(*stub->entry());
 443     } else {
 444       __ unlock_object(r5, r4, r0, *stub->entry());
 445     }
 446     __ bind(*stub->continuation());
 447     __ dec_held_monitor_count(rthread);
 448   }
 449 
 450   if (compilation()->env()->dtrace_method_probes()) {
 451     __ mov(c_rarg0, rthread);
 452     __ mov_metadata(c_rarg1, method()->constant_encoding());
 453     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), c_rarg0, c_rarg1);
 454   }
 455 
 456   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 457     __ mov(r0, r19);  // Restore the exception
 458   }
 459 
 460   // remove the activation and dispatch to the unwind handler
 461   __ block_comment("remove_frame and dispatch to the unwind handler");
 462   __ remove_frame(initial_frame_size_in_bytes());
 463   __ far_jump(RuntimeAddress(Runtime1::entry_for(Runtime1::unwind_exception_id)));
 464 
 465   // Emit the slow path assembly
 466   if (stub != NULL) {
 467     stub->emit_code(this);

2032     __ mov(dst->as_register(), (uint64_t)-1L);
2033     __ br(Assembler::LT, done);
2034     __ csinc(dst->as_register(), zr, zr, Assembler::EQ);
2035     __ bind(done);
2036   } else {
2037     ShouldNotReachHere();
2038   }
2039 }
2040 
2041 
2042 void LIR_Assembler::align_call(LIR_Code code) {  }
2043 
2044 
2045 void LIR_Assembler::call(LIR_OpJavaCall* op, relocInfo::relocType rtype) {
2046   address call = __ trampoline_call(Address(op->addr(), rtype));
2047   if (call == NULL) {
2048     bailout("trampoline stub overflow");
2049     return;
2050   }
2051   add_call_info(code_offset(), op->info());
2052   __ post_call_nop();
2053 }
2054 
2055 
2056 void LIR_Assembler::ic_call(LIR_OpJavaCall* op) {
2057   address call = __ ic_call(op->addr());
2058   if (call == NULL) {
2059     bailout("trampoline stub overflow");
2060     return;
2061   }
2062   add_call_info(code_offset(), op->info());
2063   __ post_call_nop();
2064 }
2065 
2066 void LIR_Assembler::emit_static_call_stub() {
2067   address call_pc = __ pc();
2068   address stub = __ start_a_stub(call_stub_size());
2069   if (stub == NULL) {
2070     bailout("static call stub overflow");
2071     return;
2072   }
2073 
2074   int start = __ offset();
2075 
2076   __ relocate(static_stub_Relocation::spec(call_pc));
2077   __ emit_static_call_stub();
2078 
2079   assert(__ offset() - start + CompiledStaticCall::to_trampoline_stub_size()
2080         <= call_stub_size(), "stub too big");
2081   __ end_a_stub();
2082 }
2083 

2568 void LIR_Assembler::emit_lock(LIR_OpLock* op) {
2569   Register obj = op->obj_opr()->as_register();  // may not be an oop
2570   Register hdr = op->hdr_opr()->as_register();
2571   Register lock = op->lock_opr()->as_register();
2572   if (UseHeavyMonitors) {
2573     __ b(*op->stub()->entry());
2574   } else if (op->code() == lir_lock) {
2575     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2576     // add debug info for NullPointerException only if one is possible
2577     int null_check_offset = __ lock_object(hdr, obj, lock, *op->stub()->entry());
2578     if (op->info() != NULL) {
2579       add_debug_info_for_null_check(null_check_offset, op->info());
2580     }
2581     // done
2582   } else if (op->code() == lir_unlock) {
2583     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2584     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
2585   } else {
2586     Unimplemented();
2587   }
2588   if (op->code() == lir_lock) {
2589     // If deoptimization happens in Runtime1::monitorenter, an inc_held_monitor_count placed after the
2590     // return from the slow path would be skipped. The solution is:
2591     // 1. Increment the count only on the fast path, before the slow-path continuation
2592     // 2. Let Runtime1::monitorenter increment the count itself after locking
2593     __ inc_held_monitor_count(rthread);
2594   }
2595   __ bind(*op->stub()->continuation());
2596   if (op->code() == lir_unlock) {
2597     // The slow-path unlock is a JRT_Leaf stub, so no deoptimization can happen here
2598     __ dec_held_monitor_count(rthread);
2599   }
2600 }
2601 
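The bookkeeping split in emit_lock above works as follows: the fast path increments the per-thread held-monitor count before falling through to the slow-path continuation, while a contended lock branches to Runtime1::monitorenter, which increments the count itself after locking so a deoptimization cannot skip the increment; unlocking decrements once after the continuation, because the slow-path exit cannot deoptimize. Below is a minimal, self-contained C++ sketch of that invariant; all names (Thread, fast_lock, runtime_monitorenter, ...) are hypothetical stand-ins, not HotSpot code.

// Sketch only: models the increment/decrement placement, not real locking.
#include <cassert>

struct Thread {
  int held_monitor_count = 0;   // what inc/dec_held_monitor_count maintain
};

// Hypothetical fast path: succeeds unless the monitor is contended.
static bool fast_lock(bool contended) { return !contended; }

// Hypothetical slow path: like Runtime1::monitorenter, it bumps the count
// itself after locking, so a deoptimizing slow path never loses the increment.
static void runtime_monitorenter(Thread* t) { t->held_monitor_count++; }

static void lock(Thread* t, bool contended) {
  if (fast_lock(contended)) {
    t->held_monitor_count++;        // fast path: increment before the continuation
  } else {
    runtime_monitorenter(t);        // slow path: runtime increments after locking
  }
  // continuation: the count has been incremented exactly once on either path
}

static void unlock(Thread* t) {
  // The slow-path unlock cannot deoptimize, so one decrement after the
  // continuation covers both the fast and the slow unlock path.
  t->held_monitor_count--;
}

int main() {
  Thread t;
  lock(&t, /*contended=*/false); unlock(&t);
  lock(&t, /*contended=*/true);  unlock(&t);
  assert(t.held_monitor_count == 0);
  return 0;
}
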
2602 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2603   Register obj = op->obj()->as_pointer_register();
2604   Register result = op->result_opr()->as_pointer_register();
2605 
2606   CodeEmitInfo* info = op->info();
2607   if (info != NULL) {
2608     add_debug_info_for_null_check_here(info);
2609   }
2610 
2611   if (UseCompressedClassPointers) {
2612     __ ldrw(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2613     __ decode_klass_not_null(result);
2614   } else {
2615     __ ldr(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2616   }
2617 }
2618 
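In emit_load_klass, UseCompressedClassPointers means the klass field holds a 32-bit narrow value (hence the ldrw) that decode_klass_not_null expands into a full Klass*. A conceptual sketch of that decoding is below; the base and shift are hypothetical placeholders, since the actual encoding parameters depend on how the VM reserved its class space.

#include <cstdint>

struct Klass;  // opaque; only the pointer type matters here

// Hypothetical encoding parameters (the VM chooses these at startup).
static const uintptr_t kKlassBase  = 0x0000000800000000ULL;
static const unsigned  kKlassShift = 3;

// Sketch of the usual decode: optional shift, then add the encoding base.
// "not_null" mirrors the variant used above: the caller guarantees
// narrow != 0, so no null check is needed.
inline Klass* decode_klass_not_null(uint32_t narrow) {
  return reinterpret_cast<Klass*>(kKlassBase + (uintptr_t(narrow) << kKlassShift));
}
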
2619 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {

2907   }
2908 
2909   __ lea(dest->as_register_lo(), as_Address(addr->as_address_ptr()));
2910 }
2911 
2912 
2913 void LIR_Assembler::rt_call(LIR_Opr result, address dest, const LIR_OprList* args, LIR_Opr tmp, CodeEmitInfo* info) {
2914   assert(!tmp->is_valid(), "don't need temporary");
2915 
2916   CodeBlob *cb = CodeCache::find_blob(dest);
2917   if (cb) {
2918     __ far_call(RuntimeAddress(dest));
2919   } else {
2920     __ mov(rscratch1, RuntimeAddress(dest));
2921     __ blr(rscratch1);
2922   }
2923 
2924   if (info != NULL) {
2925     add_call_info_here(info);
2926   }
2927   __ post_call_nop();
2928 }
2929 
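rt_call above uses far_call only when CodeCache::find_blob locates the destination, i.e. when the target lives in the code cache; any other runtime address is materialized into rscratch1 and called with blr. The motivation is that an AArch64 BL instruction encodes a signed 26-bit word offset, so a direct branch only reaches targets within about +/-128 MB of the call site. A small sketch of that reachability check follows; it is illustrative, not HotSpot's implementation.

#include <cstdint>

// True if 'target' is reachable from 'call_site' with a single AArch64 BL,
// whose immediate is a signed 26-bit word offset (+/-128 MB).
inline bool reachable_with_bl(uintptr_t call_site, uintptr_t target) {
  intptr_t offset = static_cast<intptr_t>(target) - static_cast<intptr_t>(call_site);
  return offset >= -(intptr_t(1) << 27) && offset < (intptr_t(1) << 27);
}
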
2930 void LIR_Assembler::volatile_move_op(LIR_Opr src, LIR_Opr dest, BasicType type, CodeEmitInfo* info) {
2931   if (dest->is_address() || src->is_address()) {
2932     move_op(src, dest, type, lir_patch_none, info,
2933             /*pop_fpu_stack*/false, /*wide*/false);
2934   } else {
2935     ShouldNotReachHere();
2936   }
2937 }
2938 
2939 #ifdef ASSERT
2940 // emit run-time assertion
2941 void LIR_Assembler::emit_assert(LIR_OpAssert* op) {
2942   assert(op->code() == lir_assert, "must be");
2943 
2944   if (op->in_opr1()->is_valid()) {
2945     assert(op->in_opr2()->is_valid(), "both operands must be valid");
2946     comp_op(op->condition(), op->in_opr1(), op->in_opr2(), op);
2947   } else {