
src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp


 421 #endif
 422 
 423   int offset = code_offset();
 424 
 425   // Fetch the exception from TLS and clear out exception related thread state
 426   __ ldr(r0, Address(rthread, JavaThread::exception_oop_offset()));
 427   __ str(zr, Address(rthread, JavaThread::exception_oop_offset()));
 428   __ str(zr, Address(rthread, JavaThread::exception_pc_offset()));
 429 
 430   __ bind(_unwind_handler_entry);
 431   __ verify_not_null_oop(r0);
 432   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 433     __ mov(r19, r0);  // Preserve the exception
 434   }
 435 
 436   // Perform needed unlocking
 437   MonitorExitStub* stub = NULL;
 438   if (method()->is_synchronized()) {
 439     monitor_address(0, FrameMap::r0_opr);
 440     stub = new MonitorExitStub(FrameMap::r0_opr, true, 0);
 441     __ unlock_object(r5, r4, r0, *stub->entry());
 442     __ bind(*stub->continuation());
 443   }
 444 
 445   if (compilation()->env()->dtrace_method_probes()) {
 446     __ mov(c_rarg0, rthread);
 447     __ mov_metadata(c_rarg1, method()->constant_encoding());
 448     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), c_rarg0, c_rarg1);
 449   }
 450 
 451   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 452     __ mov(r0, r19);  // Restore the exception
 453   }
 454 
 455   // remove the activation and dispatch to the unwind handler
 456   __ block_comment("remove_frame and dispatch to the unwind handler");
 457   __ remove_frame(initial_frame_size_in_bytes());
 458   __ far_jump(RuntimeAddress(Runtime1::entry_for(Runtime1::unwind_exception_id)));
 459 
 460   // Emit the slow path assembly
 461   if (stub != NULL) {

1222       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1223       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1224     __ b(*op->stub()->entry());
1225   } else {
1226     Register tmp1 = op->tmp1()->as_register();
1227     Register tmp2 = op->tmp2()->as_register();
1228     Register tmp3 = op->tmp3()->as_register();
1229     if (len == tmp1) {
1230       tmp1 = tmp3;
1231     } else if (len == tmp2) {
1232       tmp2 = tmp3;
1233     } else if (len == tmp3) {
1234       // everything is ok
1235     } else {
1236       __ mov(tmp3, len);
1237     }
1238     __ allocate_array(op->obj()->as_register(),
1239                       len,
1240                       tmp1,
1241                       tmp2,
1242                       arrayOopDesc::header_size(op->type()),
1243                       array_element_size(op->type()),
1244                       op->klass()->as_register(),
1245                       *op->stub()->entry());
1246   }
1247   __ bind(*op->stub()->continuation());
1248 }
1249 
1250 void LIR_Assembler::type_profile_helper(Register mdo,
1251                                         ciMethodData *md, ciProfileData *data,
1252                                         Register recv, Label* update_done) {
1253   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1254     Label next_test;
1255     // See if the receiver is receiver[n].
1256     __ lea(rscratch2, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1257     __ ldr(rscratch1, Address(rscratch2));
1258     __ cmp(recv, rscratch1);
1259     __ br(Assembler::NE, next_test);
1260     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1261     __ addptr(data_addr, DataLayout::counter_increment);
1262     __ b(*update_done);

2339   }
2340 
2341   if (flags & LIR_OpArrayCopy::src_range_check) {
2342     __ addw(tmp, src_pos, length);
2343     __ ldrw(rscratch1, src_length_addr);
2344     __ cmpw(tmp, rscratch1);
2345     __ br(Assembler::HI, *stub->entry());
2346   }
2347   if (flags & LIR_OpArrayCopy::dst_range_check) {
2348     __ addw(tmp, dst_pos, length);
2349     __ ldrw(rscratch1, dst_length_addr);
2350     __ cmpw(tmp, rscratch1);
2351     __ br(Assembler::HI, *stub->entry());
2352   }
2353 
2354   if (flags & LIR_OpArrayCopy::type_check) {
2355     // We don't know whether the array types are compatible
2356     if (basic_type != T_OBJECT) {
2357       // Simple test for basic type arrays
2358       if (UseCompressedClassPointers) {
2359         __ ldrw(tmp, src_klass_addr);
2360         __ ldrw(rscratch1, dst_klass_addr);
2361         __ cmpw(tmp, rscratch1);
2362       } else {
2363         __ ldr(tmp, src_klass_addr);
2364         __ ldr(rscratch1, dst_klass_addr);
2365         __ cmp(tmp, rscratch1);
2366       }
2367       __ br(Assembler::NE, *stub->entry());
2368     } else {
2369     // For object arrays, if src is a subclass of dst then we can
2370       // safely do the copy.
2371       Label cont, slow;
2372 
2373 #define PUSH(r1, r2)                                    \
2374       stp(r1, r2, __ pre(sp, -2 * wordSize));
2375 
2376 #define POP(r1, r2)                                     \
2377       ldp(r1, r2, __ post(sp, 2 * wordSize));
2378 
2379       __ PUSH(src, dst);
2380 
2381       __ load_klass(src, src);
2382       __ load_klass(dst, dst);
2383 
2384       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, NULL);

2468       }
2469 
2470       __ b(*stub->entry());
2471 
2472       __ bind(cont);
2473       __ POP(src, dst);
2474     }
2475   }
2476 
2477 #ifdef ASSERT
2478   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
2479     // Sanity-check the known type against the incoming classes.  For
2480     // the primitive case the types must match exactly: src.klass and
2481     // dst.klass must each equal the default type.  For the
2482     // object array case, if no type check is needed then either the
2483     // dst type is exactly the expected type and the src type is a
2484     // subtype (which we can't check), or src is the same array as dst
2485     // but not necessarily exactly of type default_type.
2486     Label known_ok, halt;
2487     __ mov_metadata(tmp, default_type->constant_encoding());
2488     if (UseCompressedClassPointers) {
2489       __ encode_klass_not_null(tmp);
2490     }
2491 
2492     if (basic_type != T_OBJECT) {
2493 
2494       if (UseCompressedClassPointers) {
2495         __ ldrw(rscratch1, dst_klass_addr);
2496         __ cmpw(tmp, rscratch1);
2497       } else {
2498         __ ldr(rscratch1, dst_klass_addr);
2499         __ cmp(tmp, rscratch1);
2500       }
2501       __ br(Assembler::NE, halt);
2502       if (UseCompressedClassPointers) {
2503         __ ldrw(rscratch1, src_klass_addr);
2504         __ cmpw(tmp, rscratch1);
2505       } else {
2506         __ ldr(rscratch1, src_klass_addr);
2507         __ cmp(tmp, rscratch1);
2508       }
2509       __ br(Assembler::EQ, known_ok);
2510     } else {
2511       if (UseCompressedClassPointers) {
2512         __ ldrw(rscratch1, dst_klass_addr);
2513         __ cmpw(tmp, rscratch1);
2514       } else {
2515         __ ldr(rscratch1, dst_klass_addr);
2516         __ cmp(tmp, rscratch1);
2517       }
2518       __ br(Assembler::EQ, known_ok);
2519       __ cmp(src, dst);
2520       __ br(Assembler::EQ, known_ok);
2521     }
2522     __ bind(halt);
2523     __ stop("incorrect type information in arraycopy");
2524     __ bind(known_ok);
2525   }
2526 #endif
2527 
2528 #ifndef PRODUCT
2529   if (PrintC1Statistics) {
2530     __ incrementw(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)));
2531   }
2532 #endif
2533 
2534   __ lea(c_rarg0, Address(src, src_pos, Address::uxtw(scale)));
2535   __ add(c_rarg0, c_rarg0, arrayOopDesc::base_offset_in_bytes(basic_type));
2536   assert_different_registers(c_rarg0, dst, dst_pos, length);
2537   __ lea(c_rarg1, Address(dst, dst_pos, Address::uxtw(scale)));

2545   const char *name;
2546   address entry = StubRoutines::select_arraycopy_function(basic_type, aligned, disjoint, name, false);
2547 
2548   CodeBlob* cb = CodeCache::find_blob(entry);
2549   if (cb) {
2550     __ far_call(RuntimeAddress(entry));
2551   } else {
2552     __ call_VM_leaf(entry, 3);
2553   }
2554 
2555   __ bind(*stub->continuation());
2556 }
2557 
2558 
2559 
2560 
2561 void LIR_Assembler::emit_lock(LIR_OpLock* op) {
2562   Register obj = op->obj_opr()->as_register();  // may not be an oop
2563   Register hdr = op->hdr_opr()->as_register();
2564   Register lock = op->lock_opr()->as_register();
2565   if (!UseFastLocking) {
2566     __ b(*op->stub()->entry());
2567   } else if (op->code() == lir_lock) {
2568     Register scratch = noreg;
2569     if (UseBiasedLocking) {
2570       scratch = op->scratch_opr()->as_register();
2571     }
2572     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2573     // add debug info for NullPointerException only if one is possible
2574     int null_check_offset = __ lock_object(hdr, obj, lock, scratch, *op->stub()->entry());
2575     if (op->info() != NULL) {
2576       add_debug_info_for_null_check(null_check_offset, op->info());
2577     }
2578     // done
2579   } else if (op->code() == lir_unlock) {
2580     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2581     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
2582   } else {
2583     Unimplemented();
2584   }
2585   __ bind(*op->stub()->continuation());
2586 }
2587 
2588 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2589   Register obj = op->obj()->as_pointer_register();
2590   Register result = op->result_opr()->as_pointer_register();
2591 
2592   CodeEmitInfo* info = op->info();
2593   if (info != NULL) {
2594     add_debug_info_for_null_check_here(info);
2595   }
2596 
2597   if (UseCompressedClassPointers) {
2598     __ ldrw(result, Address(obj, oopDesc::klass_offset_in_bytes()));
2599     __ decode_klass_not_null(result);
2600   } else {
2601     __ ldr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
2602   }
2603 }
2604 
2605 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2606   ciMethod* method = op->profiled_method();
2607   int bci          = op->profiled_bci();
2608   ciMethod* callee = op->profiled_callee();
2609 
2610   // Update counter for all call types
2611   ciMethodData* md = method->method_data_or_null();
2612   assert(md != NULL, "Sanity");
2613   ciProfileData* data = md->bci_to_data(bci);
2614   assert(data != NULL && data->is_CounterData(), "need CounterData for calls");
2615   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
2616   Register mdo  = op->mdo()->as_register();
2617   __ mov_metadata(mdo, md->constant_encoding());
2618   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));

 421 #endif
 422 
 423   int offset = code_offset();
 424 
 425   // Fetch the exception from TLS and clear out exception related thread state
 426   __ ldr(r0, Address(rthread, JavaThread::exception_oop_offset()));
 427   __ str(zr, Address(rthread, JavaThread::exception_oop_offset()));
 428   __ str(zr, Address(rthread, JavaThread::exception_pc_offset()));
 429 
 430   __ bind(_unwind_handler_entry);
 431   __ verify_not_null_oop(r0);
 432   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 433     __ mov(r19, r0);  // Preserve the exception
 434   }
 435 
 436   // Perform needed unlocking
 437   MonitorExitStub* stub = NULL;
 438   if (method()->is_synchronized()) {
 439     monitor_address(0, FrameMap::r0_opr);
 440     stub = new MonitorExitStub(FrameMap::r0_opr, true, 0);
 441     __ unlock_object(r5, r4, r0, r6, *stub->entry());
 442     __ bind(*stub->continuation());
 443   }
 444 
 445   if (compilation()->env()->dtrace_method_probes()) {
 446     __ mov(c_rarg0, rthread);
 447     __ mov_metadata(c_rarg1, method()->constant_encoding());
 448     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), c_rarg0, c_rarg1);
 449   }
 450 
 451   if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
 452     __ mov(r0, r19);  // Restore the exception
 453   }
 454 
 455   // remove the activation and dispatch to the unwind handler
 456   __ block_comment("remove_frame and dispatch to the unwind handler");
 457   __ remove_frame(initial_frame_size_in_bytes());
 458   __ far_jump(RuntimeAddress(Runtime1::entry_for(Runtime1::unwind_exception_id)));
 459 
 460   // Emit the slow path assembly
 461   if (stub != NULL) {
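
Note on the unwind handler above: it follows a fixed protocol. Fetch the pending exception oop from thread-local storage, clear the thread's exception state so the exception cannot be delivered twice, park the oop in r19 across the unlocking and dtrace work (both clobber r0), then restore r0 and jump to the shared unwind stub. The new version also passes an extra temp register (r6) to unlock_object. A minimal C++ model of the fetch-and-clear step, with an assumed thread layout (the real JavaThread offsets differ):

    // Illustrative stand-in for the JavaThread exception fields.
    struct ThreadModel {
      void* exception_oop;   // JavaThread::exception_oop_offset()
      void* exception_pc;    // JavaThread::exception_pc_offset()
    };

    // Mirrors the ldr/str sequence above: take the pending exception
    // and clear the thread state so it is not observed again.
    static void* take_pending_exception(ThreadModel* t) {
      void* ex = t->exception_oop;   // ldr r0, [rthread, #exception_oop]
      t->exception_oop = nullptr;    // str zr, [rthread, #exception_oop]
      t->exception_pc  = nullptr;    // str zr, [rthread, #exception_pc]
      return ex;                     // r0 now carries the exception oop
    }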

1222       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1223       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1224     __ b(*op->stub()->entry());
1225   } else {
1226     Register tmp1 = op->tmp1()->as_register();
1227     Register tmp2 = op->tmp2()->as_register();
1228     Register tmp3 = op->tmp3()->as_register();
1229     if (len == tmp1) {
1230       tmp1 = tmp3;
1231     } else if (len == tmp2) {
1232       tmp2 = tmp3;
1233     } else if (len == tmp3) {
1234       // everything is ok
1235     } else {
1236       __ mov(tmp3, len);
1237     }
1238     __ allocate_array(op->obj()->as_register(),
1239                       len,
1240                       tmp1,
1241                       tmp2,
1242                       arrayOopDesc::base_offset_in_bytes(op->type()),
1243                       array_element_size(op->type()),
1244                       op->klass()->as_register(),
1245                       *op->stub()->entry());
1246   }
1247   __ bind(*op->stub()->continuation());
1248 }
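
Two details in the allocation block above are easy to miss. First, len must stay live into allocate_array, so whichever temp register aliases len is swapped out for tmp3 (or len is copied into tmp3). Second, the new code passes arrayOopDesc::base_offset_in_bytes(op->type()), a byte offset, where the old code passed header_size, a word count; element addressing is then plain byte arithmetic. A hedged sketch of that arithmetic (constants illustrative, not the real HotSpot values):

    #include <cstddef>

    // Address of element `index` of an array whose payload begins
    // base_offset bytes after the object start.
    static inline void* array_element(void* obj, size_t base_offset,
                                      size_t elem_size, size_t index) {
      return static_cast<char*>(obj) + base_offset + index * elem_size;
    }

    // e.g. for an int[] with an assumed 16-byte base offset:
    //   array_element(obj, 16, sizeof(int), i);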
1249 
1250 void LIR_Assembler::type_profile_helper(Register mdo,
1251                                         ciMethodData *md, ciProfileData *data,
1252                                         Register recv, Label* update_done) {
1253   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1254     Label next_test;
1255     // See if the receiver is receiver[n].
1256     __ lea(rscratch2, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1257     __ ldr(rscratch1, Address(rscratch2));
1258     __ cmp(recv, rscratch1);
1259     __ br(Assembler::NE, next_test);
1260     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1261     __ addptr(data_addr, DataLayout::counter_increment);
1262     __ b(*update_done);
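
type_profile_helper scans the ReceiverTypeData rows linearly: if row i already records this receiver klass, its counter is bumped and control branches to update_done; otherwise next_test falls through to the following row. An illustrative plain-C++ model of that scan (the row struct is an assumption, not the real MethodData layout):

    #include <cstdint>
    #include <cstddef>

    struct ReceiverRow { const void* receiver; uint64_t count; };

    // Returns true when a matching row was found and its counter
    // bumped, mirroring the cmp/br/addptr/b sequence emitted above.
    static bool profile_receiver(ReceiverRow* rows, size_t row_limit,
                                 const void* recv, uint64_t increment) {
      for (size_t i = 0; i < row_limit; i++) {
        if (rows[i].receiver == recv) {   // is the receiver receiver[i]?
          rows[i].count += increment;     // addptr(data_addr, counter_increment)
          return true;                    // b(*update_done)
        }
      }
      return false;                       // caller handles the no-match rows
    }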

2339   }
2340 
2341   if (flags & LIR_OpArrayCopy::src_range_check) {
2342     __ addw(tmp, src_pos, length);
2343     __ ldrw(rscratch1, src_length_addr);
2344     __ cmpw(tmp, rscratch1);
2345     __ br(Assembler::HI, *stub->entry());
2346   }
2347   if (flags & LIR_OpArrayCopy::dst_range_check) {
2348     __ addw(tmp, dst_pos, length);
2349     __ ldrw(rscratch1, dst_length_addr);
2350     __ cmpw(tmp, rscratch1);
2351     __ br(Assembler::HI, *stub->entry());
2352   }
2353 
2354   if (flags & LIR_OpArrayCopy::type_check) {
2355     // We don't know whether the array types are compatible
2356     if (basic_type != T_OBJECT) {
2357       // Simple test for basic type arrays
2358       if (UseCompressedClassPointers) {
2359         __ load_nklass(tmp, src);
2360         __ load_nklass(rscratch1, dst);
2361         __ cmpw(tmp, rscratch1);
2362       } else {
2363         __ ldr(tmp, Address(src, oopDesc::klass_offset_in_bytes()));
2364         __ ldr(rscratch1, Address(dst, oopDesc::klass_offset_in_bytes()));
2365         __ cmp(tmp, rscratch1);
2366       }
2367       __ br(Assembler::NE, *stub->entry());
2368     } else {
2369     // For object arrays, if src is a subclass of dst then we can
2370       // safely do the copy.
2371       Label cont, slow;
2372 
2373 #define PUSH(r1, r2)                                    \
2374       stp(r1, r2, __ pre(sp, -2 * wordSize));
2375 
2376 #define POP(r1, r2)                                     \
2377       ldp(r1, r2, __ post(sp, 2 * wordSize));
2378 
2379       __ PUSH(src, dst);
2380 
2381       __ load_klass(src, src);
2382       __ load_klass(dst, dst);
2383 
2384       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, NULL);
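
The PUSH/POP pair exists because load_klass(src, src) and load_klass(dst, dst) overwrite the array pointers with their klasses; the originals are spilled as a pair and restored at cont. The macros expand to AArch64 pre-indexed stp and post-indexed ldp. A stack-discipline sketch under that reading:

    #include <cstdint>

    struct Stack { uint64_t slot[64]; int sp = 64; };

    // stp r1, r2, [sp, #-16]!  : push two registers as a pair
    static void push2(Stack& s, uint64_t r1, uint64_t r2) {
      s.sp -= 2; s.slot[s.sp] = r1; s.slot[s.sp + 1] = r2;
    }
    // ldp r1, r2, [sp], #16    : pop them back in the same order
    static void pop2(Stack& s, uint64_t& r1, uint64_t& r2) {
      r1 = s.slot[s.sp]; r2 = s.slot[s.sp + 1]; s.sp += 2;
    }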

2468       }
2469 
2470       __ b(*stub->entry());
2471 
2472       __ bind(cont);
2473       __ POP(src, dst);
2474     }
2475   }
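
The primitive-array comparison above is where this patch changes strategy: instead of loading a compressed klass word from a fixed klass field (the old src_klass_addr/dst_klass_addr), the new code calls load_nklass, which under compact object headers extracts the narrow Klass* from the mark word. A hedged C++ model of the two layouts (field shapes and the shift value are assumptions for illustration):

    #include <cstdint>

    static const int kKlassShift = 32;   // assumed narrow-klass position

    // Legacy layout: a dedicated compressed-klass field after the mark.
    struct LegacyHeader  { uint64_t mark; uint32_t nklass; };
    // Compact layout: the narrow klass lives in upper mark-word bits.
    struct CompactHeader { uint64_t mark; };

    static uint32_t load_nklass_legacy(const LegacyHeader* o) {
      return o->nklass;                  // the old ldrw(tmp, src_klass_addr)
    }
    static uint32_t load_nklass_compact(const CompactHeader* o) {
      return static_cast<uint32_t>(o->mark >> kKlassShift);
    }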
2476 
2477 #ifdef ASSERT
2478   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
2479     // Sanity-check the known type against the incoming classes.  For
2480     // the primitive case the types must match exactly: src.klass and
2481     // dst.klass must each equal the default type.  For the
2482     // object array case, if no type check is needed then either the
2483     // dst type is exactly the expected type and the src type is a
2484     // subtype (which we can't check), or src is the same array as dst
2485     // but not necessarily exactly of type default_type.
2486     Label known_ok, halt;
2487     __ mov_metadata(tmp, default_type->constant_encoding());
2488 
2489     if (basic_type != T_OBJECT) {
2490       __ cmp_klass(dst, tmp, rscratch1);
2491       __ br(Assembler::NE, halt);
2492       __ cmp_klass(src, tmp, rscratch1);
2493       __ br(Assembler::EQ, known_ok);
2494     } else {
2495       __ cmp_klass(dst, tmp, rscratch1);
2496       __ br(Assembler::EQ, known_ok);
2497       __ cmp(src, dst);
2498       __ br(Assembler::EQ, known_ok);
2499     }
2500     __ bind(halt);
2501     __ stop("incorrect type information in arraycopy");
2502     __ bind(known_ok);
2503   }
2504 #endif
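
The debug-only block above reduces to a small predicate; restated in plain C++ (klasses modeled as opaque pointers):

    // For primitives, both klasses must equal the expected default
    // type. For T_OBJECT, it suffices that dst has exactly the
    // expected type (src is then an uncheckable subtype), or that
    // src and dst are the same array.
    static bool arraycopy_types_ok(bool is_object,
                                   const void* src_klass, const void* dst_klass,
                                   const void* expected_klass,
                                   const void* src, const void* dst) {
      if (!is_object) {
        return src_klass == expected_klass && dst_klass == expected_klass;
      }
      return dst_klass == expected_klass || src == dst;
    }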
2505 
2506 #ifndef PRODUCT
2507   if (PrintC1Statistics) {
2508     __ incrementw(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)));
2509   }
2510 #endif
2511 
2512   __ lea(c_rarg0, Address(src, src_pos, Address::uxtw(scale)));
2513   __ add(c_rarg0, c_rarg0, arrayOopDesc::base_offset_in_bytes(basic_type));
2514   assert_different_registers(c_rarg0, dst, dst_pos, length);
2515   __ lea(c_rarg1, Address(dst, dst_pos, Address::uxtw(scale)));

2523   const char *name;
2524   address entry = StubRoutines::select_arraycopy_function(basic_type, aligned, disjoint, name, false);
2525 
2526   CodeBlob* cb = CodeCache::find_blob(entry);
2527   if (cb) {
2528     __ far_call(RuntimeAddress(entry));
2529   } else {
2530     __ call_VM_leaf(entry, 3);
2531   }
2532 
2533   __ bind(*stub->continuation());
2534 }
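
The call-site choice at the end is about reachability: select_arraycopy_function returns either a stub inside the code cache, reachable with a pc-relative far_call, or an address outside it, which must go through call_VM_leaf with the three arguments already in c_rarg0..c_rarg2. A minimal model of that dispatch (the range test stands in for CodeCache::find_blob):

    #include <cstdint>

    struct AddrRange { uintptr_t lo, hi; };

    // Models "CodeBlob* cb = CodeCache::find_blob(entry); if (cb) ...":
    // in-cache entries take the direct-call path, all others the
    // leaf VM call path.
    static bool use_far_call(AddrRange code_cache, uintptr_t entry) {
      return entry >= code_cache.lo && entry < code_cache.hi;
    }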
2535 
2536 
2537 
2538 
2539 void LIR_Assembler::emit_lock(LIR_OpLock* op) {
2540   Register obj = op->obj_opr()->as_register();  // may not be an oop
2541   Register hdr = op->hdr_opr()->as_register();
2542   Register lock = op->lock_opr()->as_register();
2543   Register temp = op->scratch_opr()->as_register();
2544   if (LockingMode == LM_MONITOR) {
2545     if (op->info() != NULL) {
2546       add_debug_info_for_null_check_here(op->info());
2547       __ null_check(obj, -1);
2548     }
2549     __ b(*op->stub()->entry());
2550   } else if (op->code() == lir_lock) {
2551     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2552     // add debug info for NullPointerException only if one is possible
2553     int null_check_offset = __ lock_object(hdr, obj, lock, temp, *op->stub()->entry());
2554     if (op->info() != NULL) {
2555       add_debug_info_for_null_check(null_check_offset, op->info());
2556     }
2557     // done
2558   } else if (op->code() == lir_unlock) {
2559     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2560     __ unlock_object(hdr, obj, lock, temp, *op->stub()->entry());
2561   } else {
2562     Unimplemented();
2563   }
2564   __ bind(*op->stub()->continuation());
2565 }
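
emit_lock now dispatches on LockingMode rather than the removed UseFastLocking/UseBiasedLocking flags. LM_MONITOR performs an explicit null check first (so the debug info still records a possible NullPointerException) and then branches unconditionally to the runtime stub; the inline paths hand hdr/obj/lock plus a temp register to lock_object/unlock_object and reach the stub only on contention. A sketch of the control flow (enum values assumed to mirror the HotSpot flag):

    enum LockingModeKind { LM_MONITOR = 0, LM_LEGACY = 1, LM_LIGHTWEIGHT = 2 };
    enum class LirLockOp { Lock, Unlock };

    // Which path the emitter above selects; purely illustrative.
    static const char* lock_emission_path(LockingModeKind mode, LirLockOp op) {
      if (mode == LM_MONITOR) {
        return "null check, then unconditional branch to the runtime stub";
      }
      return op == LirLockOp::Lock
          ? "inline lock_object fast path, stub entry on contention"
          : "inline unlock_object fast path, stub entry on contention";
    }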
2566 
2567 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2568   Register obj = op->obj()->as_pointer_register();
2569   Register result = op->result_opr()->as_pointer_register();
2570 
2571   CodeEmitInfo* info = op->info();
2572   if (info != NULL) {
2573     add_debug_info_for_null_check_here(info);
2574   }
2575 
2576   if (UseCompressedClassPointers) {
2577     if (UseCompactObjectHeaders) {
2578       // Check if we can take the (common) fast path, if obj is unlocked.
2579       __ ldr(result, Address(obj, oopDesc::mark_offset_in_bytes()));
2580       __ tst(result, markWord::monitor_value);
2581       __ br(Assembler::NE, *op->stub()->entry());
2582       __ bind(*op->stub()->continuation());
2583 
2584       // Shift to get proper narrow Klass*.
2585       __ lsr(result, result, markWord::klass_shift);
2586     } else {
2587       __ ldrw(result, Address(obj, oopDesc::klass_offset_in_bytes()));
2588     }
2589     __ decode_klass_not_null(result);
2590   } else {
2591     __ ldr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
2592   }
2593 }
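
The UseCompactObjectHeaders fast path in emit_load_klass relies on the mark-word encoding: when the object is not inflated-locked, the upper bits of the mark word hold the narrow Klass*, so one load plus a shift suffices; when the monitor bits are set, the mark word instead points to an ObjectMonitor and the stub slow path must recover the klass. A hedged C++ model (tag and shift constants are assumptions, not the exact markWord values):

    #include <cstdint>
    #include <optional>

    static const uint64_t kMonitorValue = 0b10;  // assumed inflated-monitor tag
    static const int      kKlassShift   = 32;    // assumed markWord::klass_shift

    // nullopt means "take the stub slow path", mirroring the branch
    // to *op->stub()->entry() above.
    static std::optional<uint32_t> fast_load_nklass(uint64_t mark) {
      if (mark & kMonitorValue) {
        return std::nullopt;                     // mark is an ObjectMonitor*
      }
      return static_cast<uint32_t>(mark >> kKlassShift);
    }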
2594 
2595 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2596   ciMethod* method = op->profiled_method();
2597   int bci          = op->profiled_bci();
2598   ciMethod* callee = op->profiled_callee();
2599 
2600   // Update counter for all call types
2601   ciMethodData* md = method->method_data_or_null();
2602   assert(md != NULL, "Sanity");
2603   ciProfileData* data = md->bci_to_data(bci);
2604   assert(data != NULL && data->is_CounterData(), "need CounterData for calls");
2605   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
2606   Register mdo  = op->mdo()->as_register();
2607   __ mov_metadata(mdo, md->constant_encoding());
2608   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));