< prev index next >

src/hotspot/share/c1/c1_LIRGenerator.cpp

Print this page

 589     assert(right_op != result_op, "malformed");
 590     __ move(left_op, result_op);
 591     left_op = result_op;
 592   }
 593 
 594   switch(code) {
 595     case Bytecodes::_iand:
 596     case Bytecodes::_land:  __ logical_and(left_op, right_op, result_op); break;
 597 
 598     case Bytecodes::_ior:
 599     case Bytecodes::_lor:   __ logical_or(left_op, right_op, result_op);  break;
 600 
 601     case Bytecodes::_ixor:
 602     case Bytecodes::_lxor:  __ logical_xor(left_op, right_op, result_op); break;
 603 
 604     default: ShouldNotReachHere();
 605   }
 606 }
 607 
 608 
 609 void LIRGenerator::monitor_enter(LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no, CodeEmitInfo* info_for_exception, CodeEmitInfo* info) {
 610   if (!GenerateSynchronizationCode) return;
 611   // for slow path, use debug info for state after successful locking
 612   CodeStub* slow_path = new MonitorEnterStub(object, lock, info);
 613   __ load_stack_address_monitor(monitor_no, lock);

 614   // for handling NullPointerException, use debug info representing just the lock stack before this monitorenter
 615   __ lock_object(hdr, object, lock, scratch, slow_path, info_for_exception);
 616 }
 617 
 618 
 619 void LIRGenerator::monitor_exit(LIR_Opr object, LIR_Opr lock, LIR_Opr new_hdr, LIR_Opr scratch, int monitor_no) {
 620   if (!GenerateSynchronizationCode) return;
 621   // setup registers
 622   LIR_Opr hdr = lock;
 623   lock = new_hdr;
 624   CodeStub* slow_path = new MonitorExitStub(lock, !UseHeavyMonitors, monitor_no);
 625   __ load_stack_address_monitor(monitor_no, lock);
 626   __ unlock_object(hdr, object, lock, scratch, slow_path);

 627 }
 628 
 629 #ifndef PRODUCT
 630 void LIRGenerator::print_if_not_loaded(const NewInstance* new_instance) {
 631   if (PrintNotLoaded && !new_instance->klass()->is_loaded()) {
 632     tty->print_cr("   ###class not loaded at new bci %d", new_instance->printable_bci());
 633   } else if (PrintNotLoaded && (!CompilerConfig::is_c1_only_no_jvmci() && new_instance->is_unresolved())) {
 634     tty->print_cr("   ###class not resolved at new bci %d", new_instance->printable_bci());
 635   }
 636 }
 637 #endif
 638 
 639 void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) {
 640   klass2reg_with_patching(klass_reg, klass, info, is_unresolved);
 641   // If klass is not loaded we do not know if the klass has finalizers:
 642   if (UseFastNewInstance && klass->is_loaded()
 643       && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {
 644 
 645     Runtime1::StubID stub_id = klass->is_initialized() ? Runtime1::fast_new_instance_id : Runtime1::fast_new_instance_init_check_id;
 646 

1224   LIRItem clazz(x->argument_at(0), this);
1225   LIRItem object(x->argument_at(1), this);
1226   clazz.load_item();
1227   object.load_item();
1228   LIR_Opr result = rlock_result(x);
1229 
1230   // need to perform null check on clazz
1231   if (x->needs_null_check()) {
1232     CodeEmitInfo* info = state_for(x);
1233     __ null_check(clazz.result(), info);
1234   }
1235 
1236   LIR_Opr call_result = call_runtime(clazz.value(), object.value(),
1237                                      CAST_FROM_FN_PTR(address, Runtime1::is_instance_of),
1238                                      x->type(),
1239                                      NULL); // NULL CodeEmitInfo results in a leaf call
1240   __ move(call_result, result);
1241 }
1242 
// Load the klass pointer of 'obj' into 'klass'.  When 'null_check_info' is
// non-NULL it is attached to the load, which then doubles as an implicit
// null check of 'obj' (callers such as do_getClass pass state_for(x) only
// when x->needs_null_check() is true).
void LIRGenerator::load_klass(LIR_Opr obj, LIR_Opr klass, CodeEmitInfo* null_check_info) {
  __ load_klass(obj, klass, null_check_info);
}
1246 
1247 // Example: object.getClass ()
1248 void LIRGenerator::do_getClass(Intrinsic* x) {
1249   assert(x->number_of_arguments() == 1, "wrong type");
1250 
1251   LIRItem rcvr(x->argument_at(0), this);
1252   rcvr.load_item();
1253   LIR_Opr temp = new_register(T_ADDRESS);
1254   LIR_Opr result = rlock_result(x);
1255 
1256   // need to perform the null check on the rcvr
1257   CodeEmitInfo* info = NULL;
1258   if (x->needs_null_check()) {
1259     info = state_for(x);
1260   }
1261 
1262   LIR_Opr klass = new_register(T_METADATA);
1263   load_klass(rcvr.result(), klass, info);
1264   __ move_wide(new LIR_Address(klass, in_bytes(Klass::java_mirror_offset()), T_ADDRESS), temp);

2656     args->append(getThreadPointer());
2657     LIR_Opr meth = new_register(T_METADATA);
2658     __ metadata2reg(method()->constant_encoding(), meth);
2659     args->append(meth);
2660     call_runtime(&signature, args, CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), voidType, NULL);
2661   }
2662 
2663   if (method()->is_synchronized()) {
2664     LIR_Opr obj;
2665     if (method()->is_static()) {
2666       obj = new_register(T_OBJECT);
2667       __ oop2reg(method()->holder()->java_mirror()->constant_encoding(), obj);
2668     } else {
2669       Local* receiver = x->state()->local_at(0)->as_Local();
2670       assert(receiver != NULL, "must already exist");
2671       obj = receiver->operand();
2672     }
2673     assert(obj->is_valid(), "must be valid");
2674 
2675     if (method()->is_synchronized() && GenerateSynchronizationCode) {
2676       LIR_Opr lock = syncLockOpr();
2677       __ load_stack_address_monitor(0, lock);
2678 
2679       CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL, x->check_flag(Instruction::DeoptimizeOnException));
2680       CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);
2681 
2682       // receiver is guaranteed non-NULL so don't need CodeEmitInfo
2683       __ lock_object(syncTempOpr(), obj, lock, new_register(T_OBJECT), slow_path, NULL);
2684     }
2685   }
2686   // increment invocation counters if needed
2687   if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting.
2688     profile_parameters(x);
2689     CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL, false);
2690     increment_invocation_counter(info);
2691   }
2692 
2693   // all blocks with a successor must end with an unconditional jump
2694   // to the successor even if they are consecutive
2695   __ jump(x->default_sux());
2696 }
2697 
2698 
2699 void LIRGenerator::do_OsrEntry(OsrEntry* x) {
2700   // construct our frame and model the production of incoming pointer
2701   // to the OSR buffer.
2702   __ osr_entry(LIR_Assembler::osrBufferPointer());
2703   LIR_Opr result = rlock_result(x);

 589     assert(right_op != result_op, "malformed");
 590     __ move(left_op, result_op);
 591     left_op = result_op;
 592   }
 593 
 594   switch(code) {
 595     case Bytecodes::_iand:
 596     case Bytecodes::_land:  __ logical_and(left_op, right_op, result_op); break;
 597 
 598     case Bytecodes::_ior:
 599     case Bytecodes::_lor:   __ logical_or(left_op, right_op, result_op);  break;
 600 
 601     case Bytecodes::_ixor:
 602     case Bytecodes::_lxor:  __ logical_xor(left_op, right_op, result_op); break;
 603 
 604     default: ShouldNotReachHere();
 605   }
 606 }
 607 
 608 
 609 void LIRGenerator::monitor_enter(LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr tmp1, LIR_Opr tmp2, int monitor_no, CodeEmitInfo* info_for_exception, CodeEmitInfo* info) {
 610   if (!GenerateSynchronizationCode) return;
 611   // for slow path, use debug info for state after successful locking
 612   CodeStub* slow_path = new MonitorEnterStub(object, info);
 613   __ load_stack_address_monitor(monitor_no, lock);
 614   __ move(object, new LIR_Address(lock, BasicObjectLock::obj_offset_in_bytes(), T_ADDRESS));
 615   // for handling NullPointerException, use debug info representing just the lock stack before this monitorenter
 616   __ lock_object(hdr, object, tmp1, tmp2, slow_path, info_for_exception);
 617 }
 618 
 619 
 620 void LIRGenerator::monitor_exit(LIR_Opr object, LIR_Opr lock, LIR_Opr new_hdr, LIR_Opr scratch, int monitor_no) {
 621   if (!GenerateSynchronizationCode) return;
 622   // setup registers
 623   CodeStub* slow_path = new MonitorExitStub(object);


 624   __ load_stack_address_monitor(monitor_no, lock);
 625   __ move(new LIR_Address(lock, BasicObjectLock::obj_offset_in_bytes(),T_ADDRESS), object);
 626   __ unlock_object(new_hdr, object, lock, scratch, slow_path);
 627 }
 628 
 629 #ifndef PRODUCT
 630 void LIRGenerator::print_if_not_loaded(const NewInstance* new_instance) {
 631   if (PrintNotLoaded && !new_instance->klass()->is_loaded()) {
 632     tty->print_cr("   ###class not loaded at new bci %d", new_instance->printable_bci());
 633   } else if (PrintNotLoaded && (!CompilerConfig::is_c1_only_no_jvmci() && new_instance->is_unresolved())) {
 634     tty->print_cr("   ###class not resolved at new bci %d", new_instance->printable_bci());
 635   }
 636 }
 637 #endif
 638 
 639 void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) {
 640   klass2reg_with_patching(klass_reg, klass, info, is_unresolved);
 641   // If klass is not loaded we do not know if the klass has finalizers:
 642   if (UseFastNewInstance && klass->is_loaded()
 643       && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {
 644 
 645     Runtime1::StubID stub_id = klass->is_initialized() ? Runtime1::fast_new_instance_id : Runtime1::fast_new_instance_init_check_id;
 646 

1224   LIRItem clazz(x->argument_at(0), this);
1225   LIRItem object(x->argument_at(1), this);
1226   clazz.load_item();
1227   object.load_item();
1228   LIR_Opr result = rlock_result(x);
1229 
1230   // need to perform null check on clazz
1231   if (x->needs_null_check()) {
1232     CodeEmitInfo* info = state_for(x);
1233     __ null_check(clazz.result(), info);
1234   }
1235 
1236   LIR_Opr call_result = call_runtime(clazz.value(), object.value(),
1237                                      CAST_FROM_FN_PTR(address, Runtime1::is_instance_of),
1238                                      x->type(),
1239                                      NULL); // NULL CodeEmitInfo results in a leaf call
1240   __ move(call_result, result);
1241 }
1242 
1243 void LIRGenerator::load_klass(LIR_Opr obj, LIR_Opr klass, CodeEmitInfo* null_check_info) {
1244   CodeStub* slow_path = new LoadKlassStub(obj, klass);
1245   __ load_klass(obj, klass, null_check_info, slow_path);
1246 }
1247 
1248 // Example: object.getClass ()
1249 void LIRGenerator::do_getClass(Intrinsic* x) {
1250   assert(x->number_of_arguments() == 1, "wrong type");
1251 
1252   LIRItem rcvr(x->argument_at(0), this);
1253   rcvr.load_item();
1254   LIR_Opr temp = new_register(T_ADDRESS);
1255   LIR_Opr result = rlock_result(x);
1256 
1257   // need to perform the null check on the rcvr
1258   CodeEmitInfo* info = NULL;
1259   if (x->needs_null_check()) {
1260     info = state_for(x);
1261   }
1262 
1263   LIR_Opr klass = new_register(T_METADATA);
1264   load_klass(rcvr.result(), klass, info);
1265   __ move_wide(new LIR_Address(klass, in_bytes(Klass::java_mirror_offset()), T_ADDRESS), temp);

2657     args->append(getThreadPointer());
2658     LIR_Opr meth = new_register(T_METADATA);
2659     __ metadata2reg(method()->constant_encoding(), meth);
2660     args->append(meth);
2661     call_runtime(&signature, args, CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), voidType, NULL);
2662   }
2663 
2664   if (method()->is_synchronized()) {
2665     LIR_Opr obj;
2666     if (method()->is_static()) {
2667       obj = new_register(T_OBJECT);
2668       __ oop2reg(method()->holder()->java_mirror()->constant_encoding(), obj);
2669     } else {
2670       Local* receiver = x->state()->local_at(0)->as_Local();
2671       assert(receiver != NULL, "must already exist");
2672       obj = receiver->operand();
2673     }
2674     assert(obj->is_valid(), "must be valid");
2675 
2676     if (method()->is_synchronized() && GenerateSynchronizationCode) {
2677       LIR_Opr lock = new_register(T_ADDRESS);


2678       CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL, x->check_flag(Instruction::DeoptimizeOnException));
2679       monitor_enter(obj, lock, syncTempOpr(), new_register(T_INT), new_register(T_INT), 0, NULL, info);



2680     }
2681   }
2682   // increment invocation counters if needed
2683   if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting.
2684     profile_parameters(x);
2685     CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL, false);
2686     increment_invocation_counter(info);
2687   }
2688 
2689   // all blocks with a successor must end with an unconditional jump
2690   // to the successor even if they are consecutive
2691   __ jump(x->default_sux());
2692 }
2693 
2694 
2695 void LIRGenerator::do_OsrEntry(OsrEntry* x) {
2696   // construct our frame and model the production of incoming pointer
2697   // to the OSR buffer.
2698   __ osr_entry(LIR_Assembler::osrBufferPointer());
2699   LIR_Opr result = rlock_result(x);
< prev index next >