
src/hotspot/cpu/s390/c1_LIRAssembler_s390.cpp

 119   // locals[nlocals-1..0]
 120   // monitors[number_of_locks-1..0]
 121   //
 122   // Locals are a direct copy of the interpreter frame, so in the osr buffer
 123   // the first slot in the locals array is the last local from the interpreter
 124   // and the last slot is local[0] (the receiver) from the interpreter.
 125   //
 126   // Similarly with locks: the first lock slot in the osr buffer is the nth lock
 127   // from the interpreter frame, and the nth lock slot in the osr buffer is the 0th lock
 128   // in the interpreter frame (the method lock if the method is synchronized).
 129 
 130   // Initialize monitors in the compiled activation.
 131   //   I0: pointer to osr buffer
 132   //
 133   // All other registers are dead at this point and the locals will be
 134   // copied into place by code emitted in the IR.
 135 
 136   Register OSR_buf = osrBufferPointer()->as_register();
 137   { assert(frame::interpreter_frame_monitor_size() == BasicObjectLock::size(), "adjust code below");
 138     int monitor_offset = BytesPerWord * method()->max_locals() +
 139       (2 * BytesPerWord) * (number_of_locks - 1);
 140     // SharedRuntime::OSR_migration_begin() packs BasicObjectLocks in
 141     // the OSR buffer using 2 word entries: first the lock and then
 142     // the oop.
 143     for (int i = 0; i < number_of_locks; i++) {
 144       int slot_offset = monitor_offset - ((i * 2) * BytesPerWord);
 145       // Verify the interpreter's monitor has a non-null object.
 146       __ asm_assert_mem8_isnot_zero(slot_offset + 1*BytesPerWord, OSR_buf, "locked object is NULL", __LINE__);
 147       // Copy the lock field into the compiled activation.
 148       __ z_lg(Z_R1_scratch, slot_offset + 0, OSR_buf);
 149       __ z_stg(Z_R1_scratch, frame_map()->address_for_monitor_lock(i));
 150       __ z_lg(Z_R1_scratch, slot_offset + 1*BytesPerWord, OSR_buf);
 151       __ z_stg(Z_R1_scratch, frame_map()->address_for_monitor_object(i));
 152     }
 153   }
 154 }
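For concreteness, a worked example of the offset arithmetic above, under assumed values (BytesPerWord = 8, method()->max_locals() = 3, number_of_locks = 2; each OSR buffer lock entry is two 8-byte words, lock word first, then the object oop):

    monitor_offset = 8*3 + 16*(2-1) = 40
    i = 0: slot_offset = 40 - 0*16 = 40   // lock word at offset 40, object oop at 48
    i = 1: slot_offset = 40 - 1*16 = 24   // lock word at offset 24, object oop at 32

The locals occupy offsets 0..23 of the buffer and the two monitor entries occupy 24..55.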
 155 
 156 // --------------------------------------------------------------------------------------------
 157 
 158 address LIR_Assembler::emit_call_c(address a) {
 159   __ align_call_far_patchable(__ pc());
 160   address call_addr = __ call_c_opt(a);
 161   if (call_addr == NULL) {
 162     bailout("const section overflow");
 163   }
 164   return call_addr;
 165 }
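Callers of emit_call_c in this assembler are expected to check for bailout immediately after the call, since a NULL return means the constant section overflowed. A minimal sketch of that pattern (the stub id shown is only illustrative):

    address a = Runtime1::entry_for(Runtime1::handle_exception_from_callee_id);
    address call_addr = emit_call_c(a);
    CHECK_BAILOUT_(-1);   // call_c_opt() returned NULL: constant section overflow.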
 166 
 167 int LIR_Assembler::emit_exception_handler() {
 168   // Generate code for exception handler.
 169   address handler_base = __ start_a_stub(exception_handler_size());
 170   if (handler_base == NULL) {

 201 
 202   // Fetch the exception from TLS and clear out exception related thread state.
 203   Address exc_oop_addr = Address(Z_thread, JavaThread::exception_oop_offset());
 204   Address exc_pc_addr  = Address(Z_thread, JavaThread::exception_pc_offset());
 205   __ z_lg(Z_EXC_OOP, exc_oop_addr);
 206   __ clear_mem(exc_oop_addr, sizeof(oop));
 207   __ clear_mem(exc_pc_addr, sizeof(intptr_t));
 208 
 209   __ bind(_unwind_handler_entry);
 210   __ verify_not_null_oop(Z_EXC_OOP);
 211   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 212     __ lgr_if_needed(exception_oop_callee_saved, Z_EXC_OOP); // Preserve the exception.
 213   }
 214 
 215   // Perform needed unlocking.
 216   MonitorExitStub* stub = NULL;
 217   if (method()->is_synchronized()) {
 218     // Runtime1::monitorexit_id expects lock address in Z_R1_scratch.
 219     LIR_Opr lock = FrameMap::as_opr(Z_R1_scratch);
 220     monitor_address(0, lock);
 221     stub = new MonitorExitStub(lock, true, 0);
 222     __ unlock_object(Rtmp1, Rtmp2, lock->as_register(), *stub->entry());

 223     __ bind(*stub->continuation());
 224   }
 225 
 226   if (compilation()->env()->dtrace_method_probes()) {
 227     ShouldNotReachHere(); // Not supported.
 228 #if 0
 229     __ mov(rdi, r15_thread);
 230     __ mov_metadata(rsi, method()->constant_encoding());
 231     __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit)));
 232 #endif
 233   }
 234 
 235   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 236     __ lgr_if_needed(Z_EXC_OOP, exception_oop_callee_saved);  // Restore the exception.
 237   }
 238 
 239   // Remove the activation and dispatch to the unwind handler.
 240   __ pop_frame();
 241   __ z_lg(Z_EXC_PC, _z_abi16(return_pc), Z_SP);
 242 

2695     }
2696   } else {
2697     ShouldNotReachHere(); // new lir_cas_??
2698   }
2699 }
2700 
2701 void LIR_Assembler::breakpoint() {
2702   Unimplemented();
2703   //  __ breakpoint_trap();
2704 }
2705 
2706 void LIR_Assembler::push(LIR_Opr opr) {
2707   ShouldNotCallThis(); // unused
2708 }
2709 
2710 void LIR_Assembler::pop(LIR_Opr opr) {
2711   ShouldNotCallThis(); // unused
2712 }
2713 
2714 void LIR_Assembler::monitor_address(int monitor_no, LIR_Opr dst_opr) {
2715   Address addr = frame_map()->address_for_monitor_lock(monitor_no);
2716   __ add2reg(dst_opr->as_register(), addr.disp(), addr.base());
2717 }
2718 
2719 void LIR_Assembler::emit_lock(LIR_OpLock* op) {
2720   Register obj = op->obj_opr()->as_register();  // May not be an oop.
2721   Register hdr = op->hdr_opr()->as_register();
2722   Register lock = op->lock_opr()->as_register();
2723   if (UseHeavyMonitors) {
2724     if (op->info() != NULL) {
2725       add_debug_info_for_null_check_here(op->info());
2726       __ null_check(obj);
2727     }
2728     __ branch_optimized(Assembler::bcondAlways, *op->stub()->entry());
2729   } else if (op->code() == lir_lock) {
2730     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2731     // Add debug info for NullPointerException only if one is possible.
2732     if (op->info() != NULL) {
2733       add_debug_info_for_null_check_here(op->info());
2734     }
2735     __ lock_object(hdr, obj, lock, *op->stub()->entry());
2736     // done
2737   } else if (op->code() == lir_unlock) {
2738     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2739     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
2740   } else {
2741     ShouldNotReachHere();
2742   }

2743   __ bind(*op->stub()->continuation());
2744 }
2745 
2746 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2747   Register obj = op->obj()->as_pointer_register();
2748   Register result = op->result_opr()->as_pointer_register();
2749 
2750   CodeEmitInfo* info = op->info();
2751   if (info != NULL) {
2752     add_debug_info_for_null_check_here(info);
2753   }
2754 
2755   if (UseCompressedClassPointers) {
2756     __ z_llgf(result, Address(obj, oopDesc::klass_offset_in_bytes()));
2757     __ decode_klass_not_null(result);
2758   } else {
2759     __ z_lg(result, Address(obj, oopDesc::klass_offset_in_bytes()));
2760   }
2761 }
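On the compressed path above, z_llgf performs a 32-bit zero-extending load of the narrow class pointer, which decode_klass_not_null() then widens. Conceptually the decode amounts to the following sketch (assuming the usual base/shift encoding; the real code uses the values held in CompressedKlassPointers and may skip work when base and shift are zero):

    // nk is the narrowKlass loaded from obj + oopDesc::klass_offset_in_bytes()
    Klass* k = (Klass*)(CompressedKlassPointers::base() + ((uintptr_t)nk << CompressedKlassPointers::shift()));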
2762 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {

 119   // locals[nlocals-1..0]
 120   // monitors[number_of_locks-1..0]
 121   //
 122   // Locals are a direct copy of the interpreter frame, so in the osr buffer
 123   // the first slot in the locals array is the last local from the interpreter
 124   // and the last slot is local[0] (the receiver) from the interpreter.
 125   //
 126   // Similarly with locks: the first lock slot in the osr buffer is the nth lock
 127   // from the interpreter frame, and the nth lock slot in the osr buffer is the 0th lock
 128   // in the interpreter frame (the method lock if the method is synchronized).
 129 
 130   // Initialize monitors in the compiled activation.
 131   //   I0: pointer to osr buffer
 132   //
 133   // All other registers are dead at this point and the locals will be
 134   // copied into place by code emitted in the IR.
 135 
 136   Register OSR_buf = osrBufferPointer()->as_register();
 137   { assert(frame::interpreter_frame_monitor_size() == BasicObjectLock::size(), "adjust code below");
 138     int monitor_offset = BytesPerWord * method()->max_locals() +
 139       BytesPerWord * (number_of_locks - 1);



 140     for (int i = 0; i < number_of_locks; i++) {
 141       int slot_offset = monitor_offset - (i * BytesPerWord);
 142       // Verify the interpreter's monitor has a non-null object.
 143       __ asm_assert_mem8_isnot_zero(slot_offset, OSR_buf, "locked object is NULL", __LINE__);
 144       // Copy the lock field into the compiled activation.
 145       __ z_lg(Z_R1_scratch, slot_offset, OSR_buf);


 146       __ z_stg(Z_R1_scratch, frame_map()->address_for_monitor_object(i));
 147     }
 148   }
 149 }
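For comparison with the earlier layout, the same worked example under the arithmetic above (assumed values: BytesPerWord = 8, method()->max_locals() = 3, number_of_locks = 2; each lock entry is now a single 8-byte word, which this code copies into the monitor object slot):

    monitor_offset = 8*3 + 8*(2-1) = 32
    i = 0: slot_offset = 32 - 0*8 = 32   // lock entry at offset 32
    i = 1: slot_offset = 32 - 1*8 = 24   // lock entry at offset 24

The locals occupy offsets 0..23 of the buffer and the two lock entries occupy 24..39.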
 150 
 151 // --------------------------------------------------------------------------------------------
 152 
 153 address LIR_Assembler::emit_call_c(address a) {
 154   __ align_call_far_patchable(__ pc());
 155   address call_addr = __ call_c_opt(a);
 156   if (call_addr == NULL) {
 157     bailout("const section overflow");
 158   }
 159   return call_addr;
 160 }
 161 
 162 int LIR_Assembler::emit_exception_handler() {
 163   // Generate code for exception handler.
 164   address handler_base = __ start_a_stub(exception_handler_size());
 165   if (handler_base == NULL) {

 196 
 197   // Fetch the exception from TLS and clear out exception related thread state.
 198   Address exc_oop_addr = Address(Z_thread, JavaThread::exception_oop_offset());
 199   Address exc_pc_addr  = Address(Z_thread, JavaThread::exception_pc_offset());
 200   __ z_lg(Z_EXC_OOP, exc_oop_addr);
 201   __ clear_mem(exc_oop_addr, sizeof(oop));
 202   __ clear_mem(exc_pc_addr, sizeof(intptr_t));
 203 
 204   __ bind(_unwind_handler_entry);
 205   __ verify_not_null_oop(Z_EXC_OOP);
 206   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 207     __ lgr_if_needed(exception_oop_callee_saved, Z_EXC_OOP); // Preserve the exception.
 208   }
 209 
 210   // Perform needed unlocking.
 211   MonitorExitStub* stub = NULL;
 212   if (method()->is_synchronized()) {
 213     // Runtime1::monitorexit_id expects lock address in Z_R1_scratch.
 214     LIR_Opr lock = FrameMap::as_opr(Z_R1_scratch);
 215     monitor_address(0, lock);
 216     __ z_lg(Z_R1_scratch, Address(Z_R1_scratch, BasicObjectLock::obj_offset_in_bytes()));
 217     stub = new MonitorExitStub(lock);
 218     __ branch_optimized(Assembler::bcondAlways, *stub->entry());
 219     __ bind(*stub->continuation());
 220   }
 221 
 222   if (compilation()->env()->dtrace_method_probes()) {
 223     ShouldNotReachHere(); // Not supported.
 224 #if 0
 225     __ mov(rdi, r15_thread);
 226     __ mov_metadata(rsi, method()->constant_encoding());
 227     __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit)));
 228 #endif
 229   }
 230 
 231   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 232     __ lgr_if_needed(Z_EXC_OOP, exception_oop_callee_saved);  // Restore the exception.
 233   }
 234 
 235   // Remove the activation and dispatch to the unwind handler.
 236   __ pop_frame();
 237   __ z_lg(Z_EXC_PC, _z_abi16(return_pc), Z_SP);
 238 

2691     }
2692   } else {
2693     ShouldNotReachHere(); // new lir_cas_??
2694   }
2695 }
2696 
2697 void LIR_Assembler::breakpoint() {
2698   Unimplemented();
2699   //  __ breakpoint_trap();
2700 }
2701 
2702 void LIR_Assembler::push(LIR_Opr opr) {
2703   ShouldNotCallThis(); // unused
2704 }
2705 
2706 void LIR_Assembler::pop(LIR_Opr opr) {
2707   ShouldNotCallThis(); // unused
2708 }
2709 
2710 void LIR_Assembler::monitor_address(int monitor_no, LIR_Opr dst_opr) {
2711   Address addr = frame_map()->address_for_monitor_object(monitor_no);
2712   __ add2reg(dst_opr->as_register(), addr.disp(), addr.base());
2713 }
2714 
2715 void LIR_Assembler::emit_lock(LIR_OpLock* op) {
2716   Register obj = op->obj_opr()->as_register();  // May not be an oop.
2717   Register hdr = op->hdr_opr()->as_register();
2718   Register lock = op->lock_opr()->as_register();
2719   if (op->info() != NULL) {
2720     add_debug_info_for_null_check_here(op->info());
2721     __ null_check(obj);
















2722   }
2723   __ branch_optimized(Assembler::bcondAlways, *op->stub()->entry());
2724   __ bind(*op->stub()->continuation());
2725 }
2726 
2727 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2728   Register obj = op->obj()->as_pointer_register();
2729   Register result = op->result_opr()->as_pointer_register();
2730 
2731   CodeEmitInfo* info = op->info();
2732   if (info != NULL) {
2733     add_debug_info_for_null_check_here(info);
2734   }
2735 
2736   if (UseCompressedClassPointers) {
2737     __ z_llgf(result, Address(obj, oopDesc::klass_offset_in_bytes()));
2738     __ decode_klass_not_null(result);
2739   } else {
2740     __ z_lg(result, Address(obj, oopDesc::klass_offset_in_bytes()));
2741   }
2742 }
2743 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {