< prev index next >

src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp

Print this page
@@ -39,10 +39,11 @@
  #include "oops/arrayOop.hpp"
  #include "oops/method.hpp"
  #include "oops/methodCounters.hpp"
  #include "oops/methodData.hpp"
  #include "oops/oop.inline.hpp"
+ #include "oops/inlineKlass.hpp"
  #include "oops/resolvedIndyEntry.hpp"
  #include "oops/resolvedMethodEntry.hpp"
  #include "prims/jvmtiExport.hpp"
  #include "prims/jvmtiThreadState.hpp"
  #include "runtime/arguments.hpp"

@@ -465,10 +466,15 @@
    // Restore stack bottom in case i2c adjusted stack
    __ ldr(rscratch1, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
    __ lea(esp, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
    // and null it as marker that esp is now tos until next java call
    __ str(zr, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
+ 
+   if (state == atos && InlineTypeReturnedAsFields) {
+     __ store_inline_type_fields_to_buf(nullptr, true);
+   }
+ 
    __ restore_bcp();
    __ restore_locals();
    __ restore_constant_pool_cache();
    __ get_method(rmethod);
  

@@ -1656,11 +1662,11 @@
  }
  
  //
  // Generic interpreted method entry to (asm) interpreter
  //
- address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
+ address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool object_init) {
    // determine code generation flags
    bool inc_counter  = UseCompiler || CountCompiledCalls;
  
    // rscratch1: sender sp
    address entry_point = __ pc();

@@ -1783,10 +1789,16 @@
        __ bind(L);
      }
  #endif
    }
  
+   // Issue a StoreStore barrier on entry to Object_init if the
+   // class has strict fields.  Be lazy, always do it.
+   if (object_init) {
+     __ membar(MacroAssembler::StoreStore);
+   }
+ 
    // start execution
  #ifdef ASSERT
    {
      Label L;
       const Address monitor_block_top (rfp,
< prev index next >