
src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp

--- old/src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp

2571   } else if (op->code() == lir_lock) {
2572     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2573     // add debug info for NullPointerException only if one is possible
2574     int null_check_offset = __ lock_object(hdr, obj, lock, *op->stub()->entry());
2575     if (op->info() != NULL) {
2576       add_debug_info_for_null_check(null_check_offset, op->info());
2577     }
2578     // done
2579   } else if (op->code() == lir_unlock) {
2580     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2581     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
2582   } else {
2583     Unimplemented();
2584   }
2585   __ bind(*op->stub()->continuation());
2586 }
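
Note on the lock/unlock dispatch above: both branches delegate the real work to MacroAssembler::lock_object / unlock_object, and the asserts pin down the one layout fact they rely on: the displaced mark word sits at offset 0 inside BasicLock, so the lock register can double as a pointer to the displaced header. A minimal model of the runtime effect of the stack-lock fast path that lock_object emits, assuming legacy stack locking (names are hypothetical and the recursive case is omitted; this is a sketch, not the emitted code):

#include <atomic>
#include <cstdint>

// 'obj_mark' is the object's header word; 'basic_lock' is the on-stack
// BasicLock, whose displaced-header slot is at offset 0 (what the assert checks).
bool stack_lock_fast_path(std::atomic<uintptr_t>& obj_mark, uintptr_t* basic_lock) {
  uintptr_t unlocked = obj_mark.load() | 0x1;  // expected "unlocked" mark pattern
  basic_lock[0] = unlocked;                    // save displaced header at offset 0
  // Swing the object's header to point at the BasicLock on our stack.
  // On failure the emitted code branches to *op->stub()->entry() instead.
  return obj_mark.compare_exchange_strong(unlocked, (uintptr_t)basic_lock);
}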
2587 
2588 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2589   Register obj = op->obj()->as_pointer_register();
2590   Register result = op->result_opr()->as_pointer_register();
2591
2592   CodeEmitInfo* info = op->info();
2593   if (info != NULL) {
2594     add_debug_info_for_null_check_here(info);
2595   }
2596 
2597   if (UseCompressedClassPointers) {
2598     __ ldrw(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2599     __ decode_klass_not_null(result);
2600   } else {
2601     __ ldr(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2602   }
2603 }
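
In this old version, emit_load_klass reads the Klass* from a dedicated header field at oopDesc::klass_offset_in_bytes(): a 32-bit ldrw plus decode_klass_not_null when class pointers are compressed, a plain 64-bit ldr otherwise. A minimal sketch of what the compressed decode amounts to, assuming the usual base-plus-shift encoding (base and shift are illustrative stand-ins for the VM's actual configuration):

#include <cstdint>

struct Klass;

static uintptr_t narrow_klass_base  = 0;  // assumption: zero-based encoding
static const int narrow_klass_shift = 3;  // assumption: 8-byte aligned Klass*

// What decode_klass_not_null does with the 32-bit value ldrw loaded.
Klass* decode_narrow_klass(uint32_t narrow) {
  return (Klass*)(narrow_klass_base + ((uintptr_t)narrow << narrow_klass_shift));
}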
2604 
2605 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2606   ciMethod* method = op->profiled_method();
2607   int bci          = op->profiled_bci();
2608   ciMethod* callee = op->profiled_callee();
2609 
2610   // Update counter for all call types
2611   ciMethodData* md = method->method_data_or_null();
2612   assert(md != NULL, "Sanity");
2613   ciProfileData* data = md->bci_to_data(bci);
2614   assert(data != NULL && data->is_CounterData(), "need CounterData for calls");
2615   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
2616   Register mdo  = op->mdo()->as_register();
2617   __ mov_metadata(mdo, md->constant_encoding());
2618   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
2619   // Perform additional virtual call profiling for invokevirtual and
2620   // invokeinterface bytecodes
2621   if (op->should_profile_receiver_type()) {
2622     assert(op->recv()->is_single_cpu(), "recv must be allocated");

+++ new/src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp

2571   } else if (op->code() == lir_lock) {
2572     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2573     // add debug info for NullPointerException only if one is possible
2574     int null_check_offset = __ lock_object(hdr, obj, lock, *op->stub()->entry());
2575     if (op->info() != NULL) {
2576       add_debug_info_for_null_check(null_check_offset, op->info());
2577     }
2578     // done
2579   } else if (op->code() == lir_unlock) {
2580     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2581     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
2582   } else {
2583     Unimplemented();
2584   }
2585   __ bind(*op->stub()->continuation());
2586 }
2587 
2588 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2589   Register obj = op->obj()->as_pointer_register();
2590   Register result = op->result_opr()->as_pointer_register();
2591   Register tmp = rscratch1;
2592 
2593   CodeEmitInfo* info = op->info();
2594   if (info != NULL) {
2595     add_debug_info_for_null_check_here(info);
2596   }
2597 
2598   assert(UseCompressedClassPointers, "expects UseCompressedClassPointers");
2599 
2600   // Check if we can take the (common) fast path, if obj is unlocked.
2601   __ ldr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
2602   __ eor(tmp, tmp, markWord::unlocked_value);
2603   __ tst(tmp, markWord::lock_mask_in_place);
2604   __ br(Assembler::NE, *op->stub()->entry());
2605 
2606   // Fast-path: shift and decode Klass*.
2607   __ mov(result, tmp);
2608   __ lsr(result, result, markWord::klass_shift);
2609   __ decode_klass_not_null(result);
2610 
2611   __ bind(*op->stub()->continuation());
2612 }
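
The new version assumes compact object headers: the narrow Klass* now lives in the upper bits of the mark word, so a single ldr of the mark both feeds the lock-bit test and supplies the class. If the low lock bits are anything other than unlocked_value, the mark is displaced (stack-locked or inflated) and no longer carries the klass bits, so the code branches to the stub's slow path. A minimal model of the fast path, with illustrative bit positions (the real values come from markWord):

#include <cstdint>

struct Klass;
Klass* decode_narrow_klass(uint32_t narrow);  // as sketched above

static const uintptr_t lock_mask_in_place = 0x3;  // illustrative: low two lock bits
static const uintptr_t unlocked_value     = 0x1;  // illustrative
static const int       klass_shift        = 32;   // illustrative: klass in high bits

Klass* load_klass_fast_path(uintptr_t mark, bool& need_slow_path) {
  // eor + tst + br(NE): take the stub unless the mark is exactly "unlocked".
  if ((mark ^ unlocked_value) & lock_mask_in_place) {
    need_slow_path = true;  // stub recovers the klass from the displaced mark
    return nullptr;
  }
  need_slow_path = false;
  // lsr + decode: the bits flipped by the eor all sit below klass_shift, so
  // the emitted code can shift the eor'd value directly; the result is the same.
  return decode_narrow_klass((uint32_t)(mark >> klass_shift));
}

This layout also explains why the new code asserts UseCompressedClassPointers unconditionally: with the klass folded into the 64-bit mark word, there is no uncompressed variant of this load.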
2613 
2614 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2615   ciMethod* method = op->profiled_method();
2616   int bci          = op->profiled_bci();
2617   ciMethod* callee = op->profiled_callee();
2618 
2619   // Update counter for all call types
2620   ciMethodData* md = method->method_data_or_null();
2621   assert(md != NULL, "Sanity");
2622   ciProfileData* data = md->bci_to_data(bci);
2623   assert(data != NULL && data->is_CounterData(), "need CounterData for calls");
2624   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
2625   Register mdo  = op->mdo()->as_register();
2626   __ mov_metadata(mdo, md->constant_encoding());
2627   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
2628   // Perform additional virtual call profiling for invokevirtual and
2629   // invokeinterface bytecodes
2630   if (op->should_profile_receiver_type()) {
2631     assert(op->recv()->is_single_cpu(), "recv must be allocated");
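
Both frames end inside emit_profile_call, which is unchanged at this point: it materializes the MethodData* into mdo and computes the address of the call-counter slot, ahead of the counter increment and receiver-type profiling that continue beyond this excerpt. At runtime, the counter update this sets up is just a load/add/store on that MDO slot, racy by design; a minimal model (names hypothetical, offset supplied by the caller as in counter_addr above):

#include <cstdint>

// Model of the counter bump emit_profile_call arranges via counter_addr.
void bump_call_counter(uint8_t* mdo, intptr_t counter_offset) {
  intptr_t* counter = (intptr_t*)(mdo + counter_offset);
  *counter += 1;  // lost updates under races are acceptable for profiling
}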