
src/hotspot/share/c1/c1_LinearScan.cpp

1223   switch (op->code()) {
1224     case lir_move:      // fall through
1225     case lir_convert: {
1226       assert(op->as_Op1() != NULL, "lir_move, lir_convert must be LIR_Op1");
1227       LIR_Op1* move = (LIR_Op1*)op;
1228 
1229       LIR_Opr move_from = move->in_opr();
1230       LIR_Opr move_to = move->result_opr();
1231 
1232       if (move_to->is_register() && move_from->is_register()) {
1233         Interval* from = interval_at(reg_num(move_from));
1234         Interval* to = interval_at(reg_num(move_to));
1235         if (from != NULL && to != NULL) {
1236           to->set_register_hint(from);
1237           TRACE_LINEAR_SCAN(4, tty->print_cr("operation at op_id %d: added hint from interval %d to %d", move->id(), from->reg_num(), to->reg_num()));
1238         }
1239       }
1240       break;
1241     }
1242     case lir_cmove: {

1243       assert(op->as_Op2() != NULL, "lir_cmove must be LIR_Op2");
1244       LIR_Op2* cmove = (LIR_Op2*)op;
1245 
1246       LIR_Opr move_from = cmove->in_opr1();
1247       LIR_Opr move_to = cmove->result_opr();
1248 
1249       if (move_to->is_register() && move_from->is_register()) {
1250         Interval* from = interval_at(reg_num(move_from));
1251         Interval* to = interval_at(reg_num(move_to));
1252         if (from != NULL && to != NULL) {
1253           to->set_register_hint(from);
1254           TRACE_LINEAR_SCAN(4, tty->print_cr("operation at op_id %d: added hint from interval %d to %d", cmove->id(), from->reg_num(), to->reg_num()));
1255         }
1256       }
1257       break;
1258     }
1259     default:
1260       break;
1261   }
1262 }
1263 
1264 
1265 void LinearScan::build_intervals() {
1266   TIME_LINEAR_SCAN(timer_build_intervals);
1267 
1268   // initialize interval list with expected number of intervals

6140       return;
6141     }
6142     assert(sux->pred_at(0) == block, "invalid control flow");
6143     assert(!sux->is_set(BlockBegin::exception_entry_flag), "exception handlers not allowed");
6144 
6145     // ignore the label at the beginning of the block
6146     append_instructions(sux_instructions, 1);
6147   }
6148 
6149   // process lir-instructions while all successors begin with the same instruction
6150   while (true) {
6151     LIR_Op* op = instruction_at(0);
6152     for (i = 1; i < num_sux; i++) {
6153       if (operations_different(op, instruction_at(i))) {
6154         // these instructions are different and cannot be optimized ->
6155         // no further optimization possible
6156         return;
6157       }
6158     }
6159 

6160     TRACE_LINEAR_SCAN(4, tty->print("----- found instruction that is equal in all %d successors: ", num_sux); op->print());
6161 
6162     // insert instruction at end of current block
6163     block->lir()->insert_before(insert_idx, op);
6164     insert_idx++;
6165 
6166     // delete the instructions at the beginning of all successors
6167     for (i = 0; i < num_sux; i++) {
6168       remove_cur_instruction(i, false);
6169     }
6170   }
6171 }
6172 
6173 
6174 // Implementation of ControlFlowOptimizer
6175 
6176 ControlFlowOptimizer::ControlFlowOptimizer() :
6177   _original_preds(4)
6178 {
6179 }

6347 
6348       assert(last_branch->block() != NULL, "last branch must always have a block as target");
6349       assert(last_branch->label() == last_branch->block()->label(), "must be equal");
6350 
6351       if (last_branch->info() == NULL) {
6352         if (last_branch->block() == code->at(i + 1)) {
6353 
6354           TRACE_LINEAR_SCAN(3, tty->print_cr("Deleting unconditional branch at end of block B%d", block->block_id()));
6355 
6356           // delete last branch instruction
6357           instructions->trunc_to(instructions->length() - 1);
6358 
6359         } else {
6360           LIR_Op* prev_op = instructions->at(instructions->length() - 2);
6361           if (prev_op->code() == lir_branch || prev_op->code() == lir_cond_float_branch) {
6362             assert(prev_op->as_OpBranch() != NULL, "branch must be of type LIR_OpBranch");
6363             LIR_OpBranch* prev_branch = (LIR_OpBranch*)prev_op;
6364 
6365             if (prev_branch->stub() == NULL) {
6366 

6367               LIR_Op2* prev_cmp = NULL;
6368               // There might be a cmove inserted for profiling which depends on the same
6369               // compare. If we change the condition of the respective compare, we have
6370               // to take care of this cmove as well.
6371               LIR_Op2* prev_cmove = NULL;
6372 
6373               for(int j = instructions->length() - 3; j >= 0 && prev_cmp == NULL; j--) {
6374                 prev_op = instructions->at(j);
6375                 // check for the cmove
6376                 if (prev_op->code() == lir_cmove) {
6377                   assert(prev_op->as_Op2() != NULL, "cmove must be of type LIR_Op2");
6378                   prev_cmove = (LIR_Op2*)prev_op;
6379                   assert(prev_branch->cond() == prev_cmove->condition(), "should be the same");
6380                 }
6381                 if (prev_op->code() == lir_cmp) {
6382                   assert(prev_op->as_Op2() != NULL, "cmp must be of type LIR_Op2");
6383                   prev_cmp = (LIR_Op2*)prev_op;
6384                   assert(prev_branch->cond() == prev_cmp->condition(), "should be the same");
6385                 }
6386               }
6387               // Guarantee because it is dereferenced below.
6388               guarantee(prev_cmp != NULL, "should have found cmp instruction for branch");

6389               if (prev_branch->block() == code->at(i + 1) && prev_branch->info() == NULL) {
6390 
6391                 TRACE_LINEAR_SCAN(3, tty->print_cr("Negating conditional branch and deleting unconditional branch at end of block B%d", block->block_id()));
6392 
6393                 // eliminate a conditional branch to the immediate successor
6394                 prev_branch->change_block(last_branch->block());
6395                 prev_branch->negate_cond();

6396                 prev_cmp->set_condition(prev_branch->cond());

6397                 instructions->trunc_to(instructions->length() - 1);

6398                 // if we do change the condition, we have to change the cmove as well
6399                 if (prev_cmove != NULL) {
6400                   prev_cmove->set_condition(prev_branch->cond());
6401                   LIR_Opr t = prev_cmove->in_opr1();
6402                   prev_cmove->set_in_opr1(prev_cmove->in_opr2());
6403                   prev_cmove->set_in_opr2(t);
6404                 }

6405               }
6406             }
6407           }
6408         }
6409       }
6410     }
6411   }
6412 
6413   DEBUG_ONLY(verify(code));
6414 }
6415 
6416 void ControlFlowOptimizer::delete_jumps_to_return(BlockList* code) {
6417 #ifdef ASSERT
6418   ResourceBitMap return_converted(BlockBegin::number_of_blocks());
6419 #endif
6420 
6421   for (int i = code->length() - 1; i >= 0; i--) {
6422     BlockBegin* block = code->at(i);
6423     LIR_OpList* cur_instructions = block->lir()->instructions_list();
6424     LIR_Op*     cur_last_op = cur_instructions->last();

6690             } else {
6691               ShouldNotReachHere();
6692             }
6693           } else if (in->is_stack()) {
6694             if (res->is_register()) {
6695               inc_counter(counter_move_stack_reg);
6696             } else {
6697               inc_counter(counter_move_stack_stack);
6698             }
6699           } else if (in->is_address()) {
6700             assert(res->is_register(), "must be");
6701             inc_counter(counter_move_mem_reg);
6702           } else if (in->is_constant()) {
6703             inc_counter(counter_move_const_any);
6704           } else {
6705             ShouldNotReachHere();
6706           }
6707           break;
6708         }
6709 

6710         case lir_cmp:             inc_counter(counter_cmp); break;

6711 
6712         case lir_branch:
6713         case lir_cond_float_branch: {
6714           LIR_OpBranch* branch = op->as_OpBranch();
6715           if (branch->block() == NULL) {
6716             inc_counter(counter_stub_branch);
6717           } else if (branch->cond() == lir_cond_always) {
6718             inc_counter(counter_uncond_branch);
6719           } else {
6720             inc_counter(counter_cond_branch);
6721           }
6722           break;
6723         }
6724 
6725         case lir_neg:
6726         case lir_add:
6727         case lir_sub:
6728         case lir_mul:
6729         case lir_div:
6730         case lir_rem:

1223   switch (op->code()) {
1224     case lir_move:      // fall through
1225     case lir_convert: {
1226       assert(op->as_Op1() != NULL, "lir_move, lir_convert must be LIR_Op1");
1227       LIR_Op1* move = (LIR_Op1*)op;
1228 
1229       LIR_Opr move_from = move->in_opr();
1230       LIR_Opr move_to = move->result_opr();
1231 
1232       if (move_to->is_register() && move_from->is_register()) {
1233         Interval* from = interval_at(reg_num(move_from));
1234         Interval* to = interval_at(reg_num(move_to));
1235         if (from != NULL && to != NULL) {
1236           to->set_register_hint(from);
1237           TRACE_LINEAR_SCAN(4, tty->print_cr("operation at op_id %d: added hint from interval %d to %d", move->id(), from->reg_num(), to->reg_num()));
1238         }
1239       }
1240       break;
1241     }
1242     case lir_cmove: {
1243 #ifndef RISCV
1244       assert(op->as_Op2() != NULL, "lir_cmove must be LIR_Op2");
1245       LIR_Op2* cmove = (LIR_Op2*)op;
1246 
1247       LIR_Opr move_from = cmove->in_opr1();
1248       LIR_Opr move_to = cmove->result_opr();
1249 #else
1250       assert(op->as_Op4() != NULL, "lir_cmove must be LIR_Op4");
1251       LIR_Op4* cmove = (LIR_Op4*)op;
1252 
1253       LIR_Opr move_from = cmove->in_opr3();
1254       LIR_Opr move_to   = cmove->in_opr4();
1255 #endif
1256       if (move_to->is_register() && move_from->is_register()) {
1257         Interval* from = interval_at(reg_num(move_from));
1258         Interval* to = interval_at(reg_num(move_to));
1259         if (from != NULL && to != NULL) {
1260           to->set_register_hint(from);
1261           TRACE_LINEAR_SCAN(4, tty->print_cr("operation at op_id %d: added hint from interval %d to %d", cmove->id(), from->reg_num(), to->reg_num()));
1262         }
1263       }
1264       break;
1265     }
1266     default:
1267       break;
1268   }
1269 }
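
The switch above records register hints for move-like operations: when both the source and the destination live in registers, the destination interval is told which interval it would like to share a register with, so the later assignment phase can often make the move (or, on RISCV, the LIR_Op4 cmove) a no-op. A minimal sketch of how such a hint can bias register selection follows; Interval, reg_hint and pick_register here are simplified stand-ins, not HotSpot's classes.

// Illustrative sketch only, not HotSpot code: Interval, reg_hint and
// pick_register are simplified stand-ins showing how a register hint can
// bias allocation toward reusing the source's register.
#include <cstdio>

struct Interval {
  int reg_num;          // virtual register number
  int assigned_reg;     // physical register, -1 if not yet assigned
  Interval* reg_hint;   // interval whose register we would like to reuse
};

// Prefer the hinted register if it is still free; otherwise fall back to the
// first free register. free_mask has one bit per physical register.
static int pick_register(const Interval* it, unsigned free_mask) {
  if (it->reg_hint != NULL) {
    int hinted = it->reg_hint->assigned_reg;
    if (hinted >= 0 && (free_mask & (1u << hinted)) != 0) {
      return hinted;  // source and destination share a register: the move becomes a no-op
    }
  }
  for (int r = 0; r < 32; r++) {
    if ((free_mask & (1u << r)) != 0) return r;
  }
  return -1;  // nothing free; a real allocator would spill here
}

int main() {
  Interval from = { 10, 5, NULL };   // source interval already lives in r5
  Interval to   = { 11, -1, &from }; // destination interval hinted to follow it
  printf("picked r%d\n", pick_register(&to, ~0u));
  return 0;
}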
1270 
1271 
1272 void LinearScan::build_intervals() {
1273   TIME_LINEAR_SCAN(timer_build_intervals);
1274 
1275   // initialize interval list with expected number of intervals

6147       return;
6148     }
6149     assert(sux->pred_at(0) == block, "invalid control flow");
6150     assert(!sux->is_set(BlockBegin::exception_entry_flag), "exception handlers not allowed");
6151 
6152     // ignore the label at the beginning of the block
6153     append_instructions(sux_instructions, 1);
6154   }
6155 
6156   // process lir-instructions while all successors begin with the same instruction
6157   while (true) {
6158     LIR_Op* op = instruction_at(0);
6159     for (i = 1; i < num_sux; i++) {
6160       if (operations_different(op, instruction_at(i))) {
6161         // these instructions are different and cannot be optimized ->
6162         // no further optimization possible
6163         return;
6164       }
6165     }
6166 
6167 #ifdef RISCV
6168     // On some platforms, such as riscv64, s390 and aarch64, the branch instruction may contain register operands.
6169     // If hoisting the move would change one of the branch instruction's operands, the optimization cannot be applied.
6170     if (branch->as_Op2() != NULL) {
6171       LIR_Op2* branch_op2 = (LIR_Op2*)branch;
6172       if (op->result_opr()->has_common_register(branch_op2->in_opr1())) return;
6173       if (op->result_opr()->has_common_register(branch_op2->in_opr2())) return;
6174     }
6175 #endif
6176 
6177     TRACE_LINEAR_SCAN(4, tty->print("----- found instruction that is equal in all %d successors: ", num_sux); op->print());
6178 
6179     // insert instruction at end of current block
6180     block->lir()->insert_before(insert_idx, op);
6181     insert_idx++;
6182 
6183     // delete the instructions at the beginning of all successors
6184     for (i = 0; i < num_sux; i++) {
6185       remove_cur_instruction(i, false);
6186     }
6187   }
6188 }
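
The loop above hoists an instruction into the current block only when every successor begins with an identical copy of it. The RISCV-guarded check adds one more condition: the hoisted instruction ends up executing before the block-ending branch, so on platforms whose conditional branches carry register operands it must not redefine a register that the branch reads. A toy sketch of that hazard check follows; ToyOp and clobbers_branch_input are illustrative stand-ins, not HotSpot's LIR types.

// Illustrative sketch only, toy IR rather than HotSpot's LIR classes: it
// shows the hazard the RISCV-guarded check above protects against, namely
// hoisting an instruction above a branch that reads the register it defines.
#include <cstdio>

struct ToyOp {
  int def_reg;      // register written by the op, -1 if none
  int use_regs[2];  // registers read by the op, -1 for unused slots
};

// True if hoisting 'op' ahead of 'branch' would redefine one of the registers
// the branch compares, which could change the branch's outcome.
static bool clobbers_branch_input(const ToyOp& op, const ToyOp& branch) {
  if (op.def_reg < 0) return false;
  for (int i = 0; i < 2; i++) {
    if (branch.use_regs[i] == op.def_reg) return true;
  }
  return false;
}

int main() {
  ToyOp branch = { -1, { 7, 8 } };   // e.g. beq x7, x8, target
  ToyOp move   = {  7, { 9, -1 } };  // mv x7, x9 -- defines x7, which the branch reads
  printf("hoist allowed: %s\n", clobbers_branch_input(move, branch) ? "no" : "yes");
  return 0;
}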
6189 
6190 
6191 // Implementation of ControlFlowOptimizer
6192 
6193 ControlFlowOptimizer::ControlFlowOptimizer() :
6194   _original_preds(4)
6195 {
6196 }

6364 
6365       assert(last_branch->block() != NULL, "last branch must always have a block as target");
6366       assert(last_branch->label() == last_branch->block()->label(), "must be equal");
6367 
6368       if (last_branch->info() == NULL) {
6369         if (last_branch->block() == code->at(i + 1)) {
6370 
6371           TRACE_LINEAR_SCAN(3, tty->print_cr("Deleting unconditional branch at end of block B%d", block->block_id()));
6372 
6373           // delete last branch instruction
6374           instructions->trunc_to(instructions->length() - 1);
6375 
6376         } else {
6377           LIR_Op* prev_op = instructions->at(instructions->length() - 2);
6378           if (prev_op->code() == lir_branch || prev_op->code() == lir_cond_float_branch) {
6379             assert(prev_op->as_OpBranch() != NULL, "branch must be of type LIR_OpBranch");
6380             LIR_OpBranch* prev_branch = (LIR_OpBranch*)prev_op;
6381 
6382             if (prev_branch->stub() == NULL) {
6383 
6384 #ifndef RISCV
6385               LIR_Op2* prev_cmp = NULL;
6386               // There might be a cmove inserted for profiling which depends on the same
6387               // compare. If we change the condition of the respective compare, we have
6388               // to take care of this cmove as well.
6389               LIR_Op2* prev_cmove = NULL;
6390 
6391               for(int j = instructions->length() - 3; j >= 0 && prev_cmp == NULL; j--) {
6392                 prev_op = instructions->at(j);
6393                 // check for the cmove
6394                 if (prev_op->code() == lir_cmove) {
6395                   assert(prev_op->as_Op2() != NULL, "cmove must be of type LIR_Op2");
6396                   prev_cmove = (LIR_Op2*)prev_op;
6397                   assert(prev_branch->cond() == prev_cmove->condition(), "should be the same");
6398                 }
6399                 if (prev_op->code() == lir_cmp) {
6400                   assert(prev_op->as_Op2() != NULL, "cmp must be of type LIR_Op2");
6401                   prev_cmp = (LIR_Op2*)prev_op;
6402                   assert(prev_branch->cond() == prev_cmp->condition(), "should be the same");
6403                 }
6404               }
6405               // Guarantee because it is dereferenced below.
6406               guarantee(prev_cmp != NULL, "should have found cmp instruction for branch");
6407 #endif
6408               if (prev_branch->block() == code->at(i + 1) && prev_branch->info() == NULL) {
6409 
6410                 TRACE_LINEAR_SCAN(3, tty->print_cr("Negating conditional branch and deleting unconditional branch at end of block B%d", block->block_id()));
6411 
6412                 // eliminate a conditional branch to the immediate successor
6413                 prev_branch->change_block(last_branch->block());
6414                 prev_branch->negate_cond();
6415 #ifndef RISCV
6416                 prev_cmp->set_condition(prev_branch->cond());
6417 #endif
6418                 instructions->trunc_to(instructions->length() - 1);
6419 #ifndef RISCV
6420                 // if we do change the condition, we have to change the cmove as well
6421                 if (prev_cmove != NULL) {
6422                   prev_cmove->set_condition(prev_branch->cond());
6423                   LIR_Opr t = prev_cmove->in_opr1();
6424                   prev_cmove->set_in_opr1(prev_cmove->in_opr2());
6425                   prev_cmove->set_in_opr2(t);
6426                 }
6427 #endif
6428               }
6429             }
6430           }
6431         }
6432       }
6433     }
6434   }
6435 
6436   DEBUG_ONLY(verify(code));
6437 }
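
The code above removes an unconditional branch to the immediately following block and, when the preceding conditional branch targets that fall-through block, negates the conditional branch and retargets it at the unconditional branch's destination. Because the condition flips, the separate compare (on platforms that have a lir_cmp) and any profiling cmove tied to it must be kept consistent: the cmove takes the negated condition and its two inputs are swapped so it still selects the same value. The sketch below replays that transformation on simplified stand-ins (ToyBranch, ToyCmove), not HotSpot's LIR classes.

// Illustrative sketch only: ToyBranch, ToyCmove and negate_and_retarget are
// simplified stand-ins, not HotSpot's LIR classes. They replay the
// transformation above: negate the conditional branch, point it at the old
// unconditional target, and keep a dependent cmove consistent by taking the
// negated condition and swapping its two inputs.
#include <cstdio>
#include <utility>

enum Cond { cond_eq, cond_ne, cond_lt, cond_ge };

static Cond negate(Cond c) {
  switch (c) {
    case cond_eq: return cond_ne;
    case cond_ne: return cond_eq;
    case cond_lt: return cond_ge;
    case cond_ge: return cond_lt;
  }
  return c;
}

struct ToyBranch { Cond cond; int target_block; };
struct ToyCmove  { Cond cond; int in1, in2, result; };  // result = cond ? in1 : in2

// Before: branch(cond) -> fall-through block, then jump -> other block.
// After:  branch(!cond) -> other block, jump deleted, fall through instead.
static void negate_and_retarget(ToyBranch& br, int jump_target, ToyCmove* cmove) {
  br.cond = negate(br.cond);
  br.target_block = jump_target;
  if (cmove != NULL) {
    cmove->cond = br.cond;              // follow the flipped condition
    std::swap(cmove->in1, cmove->in2);  // so the selected value is unchanged
  }
}

int main() {
  ToyBranch cond_br = { cond_eq, 6 };       // branch eq -> B6 (next block), then jump -> B9
  ToyCmove  cmov    = { cond_eq, 1, 2, 3 }; // r3 = (eq) ? r1 : r2
  negate_and_retarget(cond_br, 9, &cmov);   // now: branch ne -> B9, fall through to B6
  printf("branch: cond=%d target=B%d; cmove: cond=%d in1=r%d in2=r%d\n",
         (int)cond_br.cond, cond_br.target_block, (int)cmov.cond, cmov.in1, cmov.in2);
  return 0;
}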
6438 
6439 void ControlFlowOptimizer::delete_jumps_to_return(BlockList* code) {
6440 #ifdef ASSERT
6441   ResourceBitMap return_converted(BlockBegin::number_of_blocks());
6442 #endif
6443 
6444   for (int i = code->length() - 1; i >= 0; i--) {
6445     BlockBegin* block = code->at(i);
6446     LIR_OpList* cur_instructions = block->lir()->instructions_list();
6447     LIR_Op*     cur_last_op = cur_instructions->last();

6713             } else {
6714               ShouldNotReachHere();
6715             }
6716           } else if (in->is_stack()) {
6717             if (res->is_register()) {
6718               inc_counter(counter_move_stack_reg);
6719             } else {
6720               inc_counter(counter_move_stack_stack);
6721             }
6722           } else if (in->is_address()) {
6723             assert(res->is_register(), "must be");
6724             inc_counter(counter_move_mem_reg);
6725           } else if (in->is_constant()) {
6726             inc_counter(counter_move_const_any);
6727           } else {
6728             ShouldNotReachHere();
6729           }
6730           break;
6731         }
6732 
6733 #ifndef RISCV
6734         case lir_cmp:             inc_counter(counter_cmp); break;
6735 #endif
6736 
6737         case lir_branch:
6738         case lir_cond_float_branch: {
6739           LIR_OpBranch* branch = op->as_OpBranch();
6740           if (branch->block() == NULL) {
6741             inc_counter(counter_stub_branch);
6742           } else if (branch->cond() == lir_cond_always) {
6743             inc_counter(counter_uncond_branch);
6744           } else {
6745             inc_counter(counter_cond_branch);
6746           }
6747           break;
6748         }
6749 
6750         case lir_neg:
6751         case lir_add:
6752         case lir_sub:
6753         case lir_mul:
6754         case lir_div:
6755         case lir_rem: