src/hotspot/share/c1/c1_CodeStubs.hpp

// CodeStubs are little 'out-of-line' pieces of code that
// usually handle slow cases of operations. All code stubs
// are collected and code is emitted at the end of the
// nmethod.

class CodeStub: public CompilationResourceObj {
 protected:
  Label _entry;                                  // label at the stub entry point
  Label _continuation;                           // label where stub continues, if any

 public:
  CodeStub() {}

  // code generation
  void assert_no_unbound_labels()                { assert(!_entry.is_unbound() && !_continuation.is_unbound(), "unbound label"); }
  virtual void emit_code(LIR_Assembler* e) = 0;
  virtual CodeEmitInfo* info() const             { return NULL; }
  virtual bool is_exception_throw_stub() const   { return false; }
  virtual bool is_simple_exception_stub() const  { return false; }
  virtual int nr_immediate_oops_patched() const  { return 0; }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const = 0;
#endif

  // label access
  Label* entry()                                 { return &_entry; }
  Label* continuation()                          { return &_continuation; }
  // for LIR
  virtual void visit(LIR_OpVisitState* visit) = 0;
};
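To show what this interface asks of a subclass, here is a minimal sketch. It is hypothetical (ExampleSlowPathStub does not exist in this file) and assumes only the declarations above:

class ExampleSlowPathStub: public CodeStub {
 public:
  virtual void emit_code(LIR_Assembler* e) {
    // A concrete stub binds _entry here, emits its out-of-line slow-path
    // code, and finally branches back to _continuation in the main code.
  }
  virtual void visit(LIR_OpVisitState* visitor) {
    // Report the stub's LIR operands to the visitor; this sketch has none.
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("ExampleSlowPathStub"); }
#endif
};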

class CodeStubList: public GrowableArray<CodeStub*> {
 public:
  CodeStubList(): GrowableArray<CodeStub*>() {}

  void append(CodeStub* stub) {
    if (!contains(stub)) {
      GrowableArray<CodeStub*>::append(stub);
    }
  }
};
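As a usage sketch, a consumer holding a CodeStubList can total the oop immediates its stubs will patch via the new virtual. The helper below is hypothetical (count_immediate_oops is not part of this file); it relies only on GrowableArray's length()/at() accessors:

static int count_immediate_oops(CodeStubList* stubs) {
  // Sum the oop immediates that the collected stubs will patch into
  // the generated code.
  int total = 0;
  for (int i = 0; i < stubs->length(); i++) {
    total += stubs->at(i)->nr_immediate_oops_patched();
  }
  return total;
}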

// ... [intervening declarations, including the start of class PatchingStub, omitted] ...

  CodeEmitInfo* _info;
  int           _index;  // index of the patchable oop or Klass* in nmethod or metadata table if needed
  static int    _patch_info_offset;

  void align_patch_site(MacroAssembler* masm);

 public:
  static int patch_info_offset() { return _patch_info_offset; }

  PatchingStub(MacroAssembler* masm, PatchID id, int index = -1):
      _id(id)
    , _info(NULL)
    , _index(index) {
    // force alignment of patch sites so we
    // can guarantee atomic writes to the patch site.
    align_patch_site(masm);
    _pc_start = masm->pc();
    masm->bind(_patch_site_entry);
  }

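  // Mirror and appendix patches write an oop immediate into the code
  // stream, so each such stub reports one patched immediate oop; the
  // other patch kinds keep the default of zero.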
  virtual int nr_immediate_oops_patched() const {
    if (_id == load_mirror_id || _id == load_appendix_id) {
      return 1;
    }
    return 0;
  }
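
  // The constructor above binds the start of the patch site; install()
  // below binds its end and computes _bytes_to_copy, the number of
  // instruction bytes the runtime will copy when it patches the site.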
  void install(MacroAssembler* masm, LIR_PatchCode patch_code, Register obj, CodeEmitInfo* info) {
    _info = info;
    _obj = obj;
    masm->bind(_patch_site_continuation);
    _bytes_to_copy = masm->pc() - pc_start();
    if (_id == PatchingStub::access_field_id) {
      // embed a fixed offset to handle long patches which need to be offset by a word.
      // the patching code will just add the field offset field to this offset so
      // that we can reference either the high or low word of a double word field.
      int field_offset = 0;
      switch (patch_code) {
      case lir_patch_low:         field_offset = lo_word_offset_in_bytes; break;
      case lir_patch_high:        field_offset = hi_word_offset_in_bytes; break;
      case lir_patch_normal:      field_offset = 0;                       break;
      default: ShouldNotReachHere();
      }
      NativeMovRegMem* n_move = nativeMovRegMem_at(pc_start());
      n_move->set_offset(field_offset);
      // Copy will never get executed, so only copy the part which is required for patching.
      _bytes_to_copy = MAX2(n_move->num_bytes_to_end_of_patch(), (int)NativeGeneralJump::instruction_size);
      // ... [remainder of install() omitted from this excerpt] ...
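To make the high/low-word embedding concrete, here is a worked example. It is illustrative only: the byte offsets assume a little-endian target where lo_word_offset_in_bytes is 0 and hi_word_offset_in_bytes is one word (4 bytes on a 32-bit VM), and the field offset of 12 is made up.

  // Patching a two-word (long/double) field that lives at byte offset 12:
  //   lir_patch_low    -> set_offset(0); patching adds 12 -> low word at bytes 12..15
  //   lir_patch_high   -> set_offset(4); patching adds 12 -> high word at bytes 16..19
  //   lir_patch_normal -> set_offset(0); patching adds 12 -> the field itself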