< prev index next >

src/hotspot/share/opto/parse.hpp

Print this page

425   // Must this parse be aborted?
  // Delegates to C->failing_internal(); C is presumably the active Compile object — confirm against class context.
426   bool failing() const { return C->failing_internal(); } // might have cascading effects, not stressing bailouts for now.
427 
  // Map a reverse-post-order index to its parser Block; asserts the index is in [0, _block_count).
428   Block* rpo_at(int rpo) {
429     assert(0 <= rpo && rpo < _block_count, "oob");
430     return &_blocks[rpo];
431   }
  // The method-entry Block: looks up the rpo of the flow pass's start block.
432   Block* start_block() {
433     return rpo_at(flow()->start_block()->rpo());
434   }
435   // Can return null if the flow pass did not complete a block.
  // Delegates to the current block's successor_for_bci to find the successor starting at bci.
436   Block* successor_for_bci(int bci) {
437     return block()->successor_for_bci(bci);
438   }
439 
440  private:
441   // Create a JVMS & map for the initial state of this method.
442   SafePointNode* create_entry_map();
443 
444   // OSR helpers
445   Node *fetch_interpreter_state(int index, BasicType bt, Node *local_addrs, Node *local_addrs_base);
446   Node* check_interpreter_type(Node* l, const Type* type, SafePointNode* &bad_type_exit);
447   void  load_interpreter_state(Node* osr_buf);
448 
449   // Functions for managing basic blocks:
450   void init_blocks();
451   void load_state_from(Block* b);
452   void store_state_to(Block* b) { b->record_state(this); }
453 
454   // Parse all the basic blocks.
455   void do_all_blocks();
456 
457   // Parse the current basic block
458   void do_one_block();
459 
460   // Raise an error if we get a bad ciTypeFlow CFG.
461   void handle_missing_successor(int bci);
462 
463   // first actions (before BCI 0)
464   void do_method_entry();
465 
466   // implementation of monitorenter/monitorexit
467   void do_monitor_enter();
468   void do_monitor_exit();
469 
470   // Eagerly create phis throughout the state, to cope with back edges.
471   void ensure_phis_everywhere();
472 
473   // Merge the current mapping into the basic block starting at bci
474   void merge(          int target_bci);
475   // Same as plain merge, except that it allocates a new path number.
476   void merge_new_path( int target_bci);
477   // Merge the current mapping into an exception handler.
478   void merge_exception(int target_bci);
479   // Helper: Merge the current mapping into the given basic block
480   void merge_common(Block* target, int pnum);
481   // Helper functions for merging individual cells.
482   PhiNode *ensure_phi(       int idx, bool nocreate = false);
483   PhiNode *ensure_memory_phi(int idx, bool nocreate = false);
484   // Helper to merge the current memory state into the given basic block
485   void merge_memory_edges(MergeMemNode* n, int pnum, bool nophi);
486 
487   // Parse this bytecode, and alter the Parser's JVM->Node mapping
488   void do_one_bytecode();
489 
490   // helper function to generate array store check
491   void array_store_check();
492   // Helper function to generate array load
493   void array_load(BasicType etype);

494   // Helper function to generate array store
495   void array_store(BasicType etype);

496   // Helper function to compute array addressing
497   Node* array_addressing(BasicType type, int vals, const Type*& elemtype);








498 
499   void clinit_deopt();
500 
501   // Pass current map to exits
502   void return_current(Node* value);
503 
504   // Register finalizers on return from Object.<init>
505   void call_register_finalizer();
506 
507   // Insert a compiler safepoint into the graph
508   void add_safepoint();
509 
510   // Insert a compiler safepoint into the graph, if there is a back-branch.
511   void maybe_add_safepoint(int target_bci) {
  // A branch to a bci at or before the current one is a back-branch (potential loop edge).
512     if (target_bci <= bci()) {
513       add_safepoint();
514     }
515   }
516 
517   // Note:  Intrinsic generation routines may be found in library_call.cpp.

523   bool can_not_compile_call_site(ciMethod *dest_method, ciInstanceKlass *klass);
524 
525   // Helper functions for type checking bytecodes:
526   void  do_checkcast();
527   void  do_instanceof();
528 
529   // Helper functions for shifting & arithmetic
530   Node* floating_point_mod(Node* a, Node* b, BasicType type);
531   void l2f();
532 
533   // implementation of _get* and _put* bytecodes
  // All four are thin wrappers over do_field_access(is_get, is_field).
534   void do_getstatic() { do_field_access(true,  false); }
535   void do_getfield () { do_field_access(true,  true); }
536   void do_putstatic() { do_field_access(false, false); }
537   void do_putfield () { do_field_access(false, true); }
538 
539   // common code for making initial checks and forming addresses
540   void do_field_access(bool is_get, bool is_field);
541 
542   // common code for actually performing the load or store
543   void do_get_xxx(Node* obj, ciField* field, bool is_field);
544   void do_put_xxx(Node* obj, ciField* field, bool is_field);
545 


546   // implementation of object creation bytecodes
547   void do_new();
548   void do_newarray(BasicType elemtype);
549   void do_anewarray();
550   void do_multianewarray();
551   Node* expand_multianewarray(ciArrayKlass* array_klass, Node* *lengths, int ndimensions, int nargs);
552 
553   // implementation of jsr/ret
554   void do_jsr();
555   void do_ret();
556 
557   float   dynamic_branch_prediction(float &cnt, BoolTest::mask btest, Node* test);
558   float   branch_prediction(float &cnt, BoolTest::mask btest, int target_bci, Node* test);
559   bool    seems_never_taken(float prob) const;
560   bool    path_is_suitable_for_uncommon_trap(float prob) const;
561 
562   void    do_ifnull(BoolTest::mask btest, Node* c);
563   void    do_if(BoolTest::mask btest, Node* c);





564   int     repush_if_args();
565   void    adjust_map_after_if(BoolTest::mask btest, Node* c, float prob, Block* path);
566   void    sharpen_type_after_if(BoolTest::mask btest,
567                                 Node* con, const Type* tcon,
568                                 Node* val, const Type* tval);
569   void    maybe_add_predicate_after_if(Block* path);
570   IfNode* jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask, float prob, float cnt);
571   void    jump_if_true_fork(IfNode *ifNode, int dest_bci_if_true, bool unc);
572   void    jump_if_false_fork(IfNode *ifNode, int dest_bci_if_false, bool unc);
573   void    jump_if_always_fork(int dest_bci_if_true, bool unc);
574 
575   friend class SwitchRange;
576   void    do_tableswitch();
577   void    do_lookupswitch();
578   void    jump_switch_ranges(Node* a, SwitchRange* lo, SwitchRange* hi, int depth = 0);
579   bool    create_jump_tables(Node* a, SwitchRange* lo, SwitchRange* hi);
580   void    linear_search_switch_ranges(Node* key_val, SwitchRange*& lo, SwitchRange*& hi);
581 
582   // helper function for call statistics
583   void count_compiled_calls(bool at_method_entry, bool is_inline) PRODUCT_RETURN;
584 
585   Node_Notes* make_node_notes(Node_Notes* caller_nn);

425   // Must this parse be aborted?
  // Delegates to C->failing_internal(); C is presumably the active Compile object — confirm against class context.
426   bool failing() const { return C->failing_internal(); } // might have cascading effects, not stressing bailouts for now.
427 
  // Map a reverse-post-order index to its parser Block; asserts the index is in [0, _block_count).
428   Block* rpo_at(int rpo) {
429     assert(0 <= rpo && rpo < _block_count, "oob");
430     return &_blocks[rpo];
431   }
  // The method-entry Block: looks up the rpo of the flow pass's start block.
432   Block* start_block() {
433     return rpo_at(flow()->start_block()->rpo());
434   }
435   // Can return null if the flow pass did not complete a block.
  // Delegates to the current block's successor_for_bci to find the successor starting at bci.
436   Block* successor_for_bci(int bci) {
437     return block()->successor_for_bci(bci);
438   }
439 
440  private:
441   // Create a JVMS & map for the initial state of this method.
442   SafePointNode* create_entry_map();
443 
444   // OSR helpers
445   Node* fetch_interpreter_state(int index, const Type* type, Node* local_addrs, Node* local_addrs_base);
446   Node* check_interpreter_type(Node* l, const Type* type, SafePointNode* &bad_type_exit, bool is_larval);
447   void  load_interpreter_state(Node* osr_buf);
448 
449   // Functions for managing basic blocks:
450   void init_blocks();
451   void load_state_from(Block* b);
452   void store_state_to(Block* b) { b->record_state(this); }
453 
454   // Parse all the basic blocks.
455   void do_all_blocks();
456 
457   // Parse the current basic block
458   void do_one_block();
459 
460   // Raise an error if we get a bad ciTypeFlow CFG.
461   void handle_missing_successor(int bci);
462 
463   // first actions (before BCI 0)
464   void do_method_entry();
465 
466   // implementation of monitorenter/monitorexit
467   void do_monitor_enter();
468   void do_monitor_exit();
469 
470   // Eagerly create phis throughout the state, to cope with back edges.
471   void ensure_phis_everywhere();
472 
473   // Merge the current mapping into the basic block starting at bci
474   void merge(          int target_bci);
475   // Same as plain merge, except that it allocates a new path number.
476   void merge_new_path( int target_bci);
477   // Merge the current mapping into an exception handler.
478   void merge_exception(int target_bci);
479   // Helper: Merge the current mapping into the given basic block
480   void merge_common(Block* target, int pnum);
481   // Helper functions for merging individual cells.
482   Node*    ensure_phi(       int idx, bool nocreate = false);
483   PhiNode* ensure_memory_phi(int idx, bool nocreate = false);
484   // Helper to merge the current memory state into the given basic block
485   void merge_memory_edges(MergeMemNode* n, int pnum, bool nophi);
486 
487   // Parse this bytecode, and alter the Parser's JVM->Node mapping
488   void do_one_bytecode();
489 
490   // helper function to generate array store check
491   Node* array_store_check(Node*& adr, const Type*& elemtype);
492   // Helper function to generate array load
493   void array_load(BasicType etype);
494   Node* load_from_unknown_flat_array(Node* array, Node* array_index, const TypeOopPtr* element_ptr);
495   // Helper function to generate array store
496   void array_store(BasicType etype);
497   void store_to_unknown_flat_array(Node* array, Node* idx, Node* non_null_stored_value);
498   // Helper function to compute array addressing
499   Node* array_addressing(BasicType type, int vals, const Type*& elemtype);
500   bool needs_range_check(const TypeInt* size_type, const Node* index) const;
501   Node* create_speculative_inline_type_array_checks(Node* array, const TypeAryPtr* array_type, const Type*& element_type);
502   Node* cast_to_speculative_array_type(Node* array, const TypeAryPtr*& array_type, const Type*& element_type);
503   Node* cast_to_profiled_array_type(Node* const array);
504   Node* speculate_non_null_free_array(Node* array, const TypeAryPtr*& array_type);
505   Node* speculate_non_flat_array(Node* array, const TypeAryPtr* array_type);
506   void create_range_check(Node* idx, Node* ary, const TypeInt* sizetype);
507   Node* record_profile_for_speculation_at_array_load(Node* ld);
508 
509   void clinit_deopt();
510 
511   // Pass current map to exits
512   void return_current(Node* value);
513 
514   // Register finalizers on return from Object.<init>
515   void call_register_finalizer();
516 
517   // Insert a compiler safepoint into the graph
518   void add_safepoint();
519 
520   // Insert a compiler safepoint into the graph, if there is a back-branch.
521   void maybe_add_safepoint(int target_bci) {
  // A branch to a bci at or before the current one is a back-branch (potential loop edge).
522     if (target_bci <= bci()) {
523       add_safepoint();
524     }
525   }
526 
527   // Note:  Intrinsic generation routines may be found in library_call.cpp.

533   bool can_not_compile_call_site(ciMethod *dest_method, ciInstanceKlass *klass);
534 
535   // Helper functions for type checking bytecodes:
536   void  do_checkcast();
537   void  do_instanceof();
538 
539   // Helper functions for shifting & arithmetic
540   Node* floating_point_mod(Node* a, Node* b, BasicType type);
541   void l2f();
542 
543   // implementation of _get* and _put* bytecodes
  // All four are thin wrappers over do_field_access(is_get, is_field).
544   void do_getstatic() { do_field_access(true,  false); }
545   void do_getfield () { do_field_access(true,  true); }
546   void do_putstatic() { do_field_access(false, false); }
547   void do_putfield () { do_field_access(false, true); }
548 
549   // common code for making initial checks and forming addresses
550   void do_field_access(bool is_get, bool is_field);
551 
552   // common code for actually performing the load or store
553   void do_get_xxx(Node* obj, ciField* field);
554   void do_put_xxx(Node* obj, ciField* field, bool is_field);
555 
556   ciType* improve_abstract_inline_type_klass(ciType* field_klass);
557 
558   // implementation of object creation bytecodes
559   void do_new();
560   void do_newarray(BasicType elemtype);
561   void do_newarray();
562   void do_multianewarray();
563   Node* expand_multianewarray(ciArrayKlass* array_klass, Node* *lengths, int ndimensions, int nargs);
564 
565   // implementation of jsr/ret
566   void do_jsr();
567   void do_ret();
568 
569   float   dynamic_branch_prediction(float &cnt, BoolTest::mask btest, Node* test);
570   float   branch_prediction(float &cnt, BoolTest::mask btest, int target_bci, Node* test);
571   bool    seems_never_taken(float prob) const;
572   bool    path_is_suitable_for_uncommon_trap(float prob) const;
573 
574   void    do_ifnull(BoolTest::mask btest, Node* c);
575   void    do_if(BoolTest::mask btest, Node* c, bool can_trap = true, bool new_path = false, Node** ctrl_taken = nullptr);
576   void    do_acmp(BoolTest::mask btest, Node* left, Node* right);
577   void    acmp_always_null_input(Node* input, const TypeOopPtr* tinput, BoolTest::mask btest, Node* eq_region);
578   void    acmp_known_non_inline_type_input(Node* input, const TypeOopPtr* tinput, ProfilePtrKind input_ptr, ciKlass* input_type, BoolTest::mask btest, Node* eq_region);
579   Node*   acmp_null_check(Node* input, const TypeOopPtr* tinput, ProfilePtrKind input_ptr, Node*& null_ctl);
580   void    acmp_unknown_non_inline_type_input(Node* input, const TypeOopPtr* tinput, ProfilePtrKind input_ptr, BoolTest::mask btest, Node* eq_region);
581   int     repush_if_args();
582   void    adjust_map_after_if(BoolTest::mask btest, Node* c, float prob, Block* path, bool can_trap = true);
583   void    sharpen_type_after_if(BoolTest::mask btest,
584                                 Node* con, const Type* tcon,
585                                 Node* val, const Type* tval);
586   void    maybe_add_predicate_after_if(Block* path);
587   IfNode* jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask, float prob, float cnt);
588   void    jump_if_true_fork(IfNode *ifNode, int dest_bci_if_true, bool unc);
589   void    jump_if_false_fork(IfNode *ifNode, int dest_bci_if_false, bool unc);
590   void    jump_if_always_fork(int dest_bci_if_true, bool unc);
591 
592   friend class SwitchRange;
593   void    do_tableswitch();
594   void    do_lookupswitch();
595   void    jump_switch_ranges(Node* a, SwitchRange* lo, SwitchRange* hi, int depth = 0);
596   bool    create_jump_tables(Node* a, SwitchRange* lo, SwitchRange* hi);
597   void    linear_search_switch_ranges(Node* key_val, SwitchRange*& lo, SwitchRange*& hi);
598 
599   // helper function for call statistics
600   void count_compiled_calls(bool at_method_entry, bool is_inline) PRODUCT_RETURN;
601 
602   Node_Notes* make_node_notes(Node_Notes* caller_nn);
< prev index next >