    default: ShouldNotReachHere();
  }
}

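// Emits the monitorenter sequence: the frame slot for monitor 'monitor_no' is
// materialized into 'lock', lock_object() produces the inline fast path (and the
// implicit null check via info_for_exception), and MonitorEnterStub is the
// contended slow path.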
void LIRGenerator::monitor_enter(LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no, CodeEmitInfo* info_for_exception, CodeEmitInfo* info) {
  if (!GenerateSynchronizationCode) return;
  // for slow path, use debug info for state after successful locking
  CodeStub* slow_path = new MonitorEnterStub(object, lock, info);
  __ load_stack_address_monitor(monitor_no, lock);
  // for handling NullPointerException, use debug info representing just the lock stack before this monitorenter
  __ lock_object(hdr, object, lock, scratch, slow_path, info_for_exception);
}

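// Emits the matching monitorexit sequence; no exception info is needed here
// since the object was already proven non-null when it was locked.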
void LIRGenerator::monitor_exit(LIR_Opr object, LIR_Opr lock, LIR_Opr new_hdr, LIR_Opr scratch, int monitor_no) {
  if (!GenerateSynchronizationCode) return;
  // setup registers: reuse the incoming lock operand for the header and take new_hdr as the lock
  LIR_Opr hdr = lock;
  lock = new_hdr;
  CodeStub* slow_path = new MonitorExitStub(lock, LockingMode != LM_MONITOR, monitor_no);
  __ load_stack_address_monitor(monitor_no, lock);
  __ unlock_object(hdr, object, lock, scratch, slow_path);
}

#ifndef PRODUCT
void LIRGenerator::print_if_not_loaded(const NewInstance* new_instance) {
  if (PrintNotLoaded && !new_instance->klass()->is_loaded()) {
    tty->print_cr(" ###class not loaded at new bci %d", new_instance->printable_bci());
  } else if (PrintNotLoaded && (!CompilerConfig::is_c1_only_no_jvmci() && new_instance->is_unresolved())) {
    tty->print_cr(" ###class not resolved at new bci %d", new_instance->printable_bci());
  }
}
#endif

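// Allocates a new instance of 'klass': the klass is loaded into klass_reg (with
// patching if it is unresolved) and, when the fast-path conditions hold, the
// allocation itself is inlined; otherwise it falls back to the runtime slow path
// (the remainder of the body is elided below).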
void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) {
  klass2reg_with_patching(klass_reg, klass, info, is_unresolved);
  // If klass is not loaded we do not know if the klass has finalizers:
  if (UseFastNewInstance && klass->is_loaded()
      && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {
    // ... remainder of new_instance elided ...

// Example: clazz.isInstance(object)
void LIRGenerator::do_isInstance(Intrinsic* x) {
  LIRItem clazz(x->argument_at(0), this);
  LIRItem object(x->argument_at(1), this);
  clazz.load_item();
  object.load_item();
  LIR_Opr result = rlock_result(x);

  // need to perform null check on clazz
  if (x->needs_null_check()) {
    CodeEmitInfo* info = state_for(x);
    __ null_check(clazz.result(), info);
  }

  LIR_Opr call_result = call_runtime(clazz.value(), object.value(),
                                     CAST_FROM_FN_PTR(address, Runtime1::is_instance_of),
                                     x->type(),
                                     NULL); // NULL CodeEmitInfo results in a leaf call
  __ move(call_result, result);
}

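// Loads the Klass* of 'obj' into 'klass'. With compact object headers the klass
// bits live in the mark word, so a LoadKlassStub slow path handles the rare case
// where they cannot be decoded straight from the header.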
void LIRGenerator::load_klass(LIR_Opr obj, LIR_Opr klass, CodeEmitInfo* null_check_info) {
  CodeStub* slow_path = UseCompactObjectHeaders ? new LoadKlassStub(klass) : NULL;
  __ load_klass(obj, klass, null_check_info, slow_path);
}

// Example: object.getClass()
void LIRGenerator::do_getClass(Intrinsic* x) {
  assert(x->number_of_arguments() == 1, "wrong type");

  LIRItem rcvr(x->argument_at(0), this);
  rcvr.load_item();
  LIR_Opr temp = new_register(T_ADDRESS);
  LIR_Opr result = rlock_result(x);

  // need to perform the null check on the rcvr
  CodeEmitInfo* info = NULL;
  if (x->needs_null_check()) {
    info = state_for(x);
  }

  LIR_Opr klass = new_register(T_METADATA);
  load_klass(rcvr.result(), klass, info);
  __ move_wide(new LIR_Address(klass, in_bytes(Klass::java_mirror_offset()), T_ADDRESS), temp);