
src/hotspot/share/runtime/sharedRuntime.cpp

*** 470,18 ***
    // JVMCI's ExceptionHandlerStub expects the thread local exception PC to be clear
    // and other exception handler continuations do not read it
    current->set_exception_pc(NULL);
  #endif // INCLUDE_JVMCI
  
    // The fastest case first
    CodeBlob* blob = CodeCache::find_blob(return_address);
    CompiledMethod* nm = (blob != NULL) ? blob->as_compiled_method_or_null() : NULL;
    if (nm != NULL) {
      // Set flag if return address is a method handle call site.
      current->set_is_method_handle_return(nm->is_method_handle_return(return_address));
      // native nmethods don't have exception handlers
!     assert(!nm->is_native_method(), "no exception handler");
      assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
      if (nm->is_deopt_pc(return_address)) {
        // If we come here because of a stack overflow, the stack may be
        // unguarded. Reguard the stack otherwise if we return to the
        // deopt blob and the stack bang causes a stack overflow we
--- 470,22 ---
    // JVMCI's ExceptionHandlerStub expects the thread local exception PC to be clear
    // and other exception handler continuations do not read it
    current->set_exception_pc(NULL);
  #endif // INCLUDE_JVMCI
  
+   if (Continuation::is_return_barrier_entry(return_address)) {
+     return StubRoutines::cont_returnBarrierExc();
+   }
+ 
    // The fastest case first
    CodeBlob* blob = CodeCache::find_blob(return_address);
    CompiledMethod* nm = (blob != NULL) ? blob->as_compiled_method_or_null() : NULL;
    if (nm != NULL) {
      // Set flag if return address is a method handle call site.
      current->set_is_method_handle_return(nm->is_method_handle_return(return_address));
      // native nmethods don't have exception handlers
!     assert(!nm->is_native_method() || nm->method()->is_continuation_enter_intrinsic(), "no exception handler");
      assert(nm->header_begin() != nm->exception_begin(), "no exception handler");
      if (nm->is_deopt_pc(return_address)) {
        // If we come here because of a stack overflow, the stack may be
        // unguarded. Reguard the stack otherwise if we return to the
        // deopt blob and the stack bang causes a stack overflow we

*** 523,10 ***
--- 527,11 ---
    guarantee(!VtableStubs::contains(return_address), "NULL exceptions in vtables should have been handled already!");
  
  #ifndef PRODUCT
    { ResourceMark rm;
      tty->print_cr("No exception handler found for exception at " INTPTR_FORMAT " - potential problems:", p2i(return_address));
+     os::print_location(tty, (intptr_t)return_address);
      tty->print_cr("a) exception happened in (new?) code stubs/buffers that is not handled here");
      tty->print_cr("b) other problem");
    }
  #endif // PRODUCT
  

*** 548,11 ***
    // Should be an nmethod
    guarantee(cb != NULL && cb->is_compiled(), "safepoint polling: pc must refer to an nmethod");
  
    // Look up the relocation information
    assert(((CompiledMethod*)cb)->is_at_poll_or_poll_return(pc),
!     "safepoint polling: type must be poll");
  
  #ifdef ASSERT
    if (!((NativeInstruction*)pc)->is_safepoint_poll()) {
      tty->print_cr("bad pc: " PTR_FORMAT, p2i(pc));
      Disassembler::decode(cb);
--- 553,11 ---
    // Should be an nmethod
    guarantee(cb != NULL && cb->is_compiled(), "safepoint polling: pc must refer to an nmethod");
  
    // Look up the relocation information
    assert(((CompiledMethod*)cb)->is_at_poll_or_poll_return(pc),
!       "safepoint polling: type must be poll at pc " INTPTR_FORMAT, p2i(pc));
  
  #ifdef ASSERT
    if (!((NativeInstruction*)pc)->is_safepoint_poll()) {
      tty->print_cr("bad pc: " PTR_FORMAT, p2i(pc));
      Disassembler::decode(cb);

*** 586,10 ***
--- 591,12 ---
  oop SharedRuntime::retrieve_receiver( Symbol* sig, frame caller ) {
    assert(caller.is_interpreted_frame(), "");
    int args_size = ArgumentSizeComputer(sig).size() + 1;
    assert(args_size <= caller.interpreter_frame_expression_stack_size(), "receiver must be on interpreter stack");
    oop result = cast_to_oop(*caller.interpreter_frame_tos_at(args_size - 1));
+   // TODO: Erik: remove after integration with concurrent stack scanning
+   result = NativeAccess<>::oop_load(&result);
    assert(Universe::heap()->is_in(result) && oopDesc::is_oop(result), "receiver must be an oop");
    return result;
  }
  
  

*** 737,17 ***
    }
  #endif
  
    if (t == NULL) {
      ttyLocker ttyl;
!     tty->print_cr("MISSING EXCEPTION HANDLER for pc " INTPTR_FORMAT " and handler bci %d", p2i(ret_pc), handler_bci);
      tty->print_cr("   Exception:");
      exception->print();
      tty->cr();
      tty->print_cr(" Compiled exception table :");
      table.print();
!     nm->print_code();
      guarantee(false, "missing exception handler");
      return NULL;
    }
  
    return nm->code_begin() + t->pco();
--- 744,18 ---
    }
  #endif
  
    if (t == NULL) {
      ttyLocker ttyl;
!     tty->print_cr("MISSING EXCEPTION HANDLER for pc " INTPTR_FORMAT " and handler bci %d, catch_pco: %d", p2i(ret_pc), handler_bci, catch_pco);
      tty->print_cr("   Exception:");
      exception->print();
      tty->cr();
      tty->print_cr(" Compiled exception table :");
      table.print();
!     nm->print();
+     // nm->print_code();
      guarantee(false, "missing exception handler");
      return NULL;
    }
  
    return nm->code_begin() + t->pco();

*** 1084,10 ***
--- 1092,16 ---
  
    // Find caller and bci from vframe
    methodHandle caller(current, vfst.method());
    int          bci   = vfst.bci();
  
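+   // Calls made by the continuation-enter intrinsic have no invoke bytecode to
+   // decode; resolve them directly as an invokestatic.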
+   if (caller->is_continuation_enter_intrinsic()) {
+     bc = Bytecodes::_invokestatic;
+     LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
+     return receiver;
+   }
+ 
    Bytecode_invoke bytecode(caller, bci);
    int bytecode_index = bytecode.index();
    bc = bytecode.invoke_code();
  
    methodHandle attached_method(current, extract_attached_method(vfst));

*** 1146,10 ***
--- 1160,11 ---
        }
      }
  
      // Retrieve from a compiled argument list
      receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
+     assert(oopDesc::is_oop_or_null(receiver()), "retrieved receiver must be an oop or null");
  
      if (receiver.is_null()) {
        THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
      }
    }

*** 2124,10 ***
--- 2139,15 ---
  // Handles the uncommon case in locking, i.e., contention or an inflated lock.
  JRT_BLOCK_ENTRY(void, SharedRuntime::complete_monitor_locking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
    SharedRuntime::monitor_enter_helper(obj, lock, current);
  JRT_END
  
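+ // As complete_monitor_locking_C, but also increments the thread-local count of
+ // held monitors after the monitor has been entered.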
+ JRT_BLOCK_ENTRY(void, SharedRuntime::complete_monitor_locking_C_inc_held_monitor_count(oopDesc* obj, BasicLock* lock, JavaThread* current))
+   SharedRuntime::monitor_enter_helper(obj, lock, current);
+   current->inc_held_monitor_count();
+ JRT_END
+ 
  void SharedRuntime::monitor_exit_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
    assert(JavaThread::current() == current, "invariant");
    // Exit must be non-blocking, and therefore no exceptions can be thrown.
    ExceptionMark em(current);
    // The object could become unlocked through a JNI call, which we have no other checks for.

*** 2917,11 ***
      ttyLocker ttyl;
      entry->print_adapter_on(tty);
      tty->print_cr("i2c argument handler #%d for: %s %s (%d bytes generated)",
                    _adapters->number_of_entries(), fingerprint->as_basic_args_string(),
                    fingerprint->as_string(), insts_size);
!     tty->print_cr("c2i argument handler starts at %p", entry->get_c2i_entry());
      if (Verbose || PrintStubCode) {
        address first_pc = entry->base_address();
        if (first_pc != NULL) {
          Disassembler::decode(first_pc, first_pc + insts_size, tty
                               NOT_PRODUCT(COMMA &new_adapter->asm_remarks()));
--- 2937,11 ---
      ttyLocker ttyl;
      entry->print_adapter_on(tty);
      tty->print_cr("i2c argument handler #%d for: %s %s (%d bytes generated)",
                    _adapters->number_of_entries(), fingerprint->as_basic_args_string(),
                    fingerprint->as_string(), insts_size);
!     tty->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(entry->get_c2i_entry()));
      if (Verbose || PrintStubCode) {
        address first_pc = entry->base_address();
        if (first_pc != NULL) {
          Disassembler::decode(first_pc, first_pc + insts_size, tty
                               NOT_PRODUCT(COMMA &new_adapter->asm_remarks()));

*** 3005,14 ***
    ResourceMark rm;
    nmethod* nm = NULL;
    address critical_entry = NULL;
  
    assert(method->is_native(), "must be native");
!   assert(method->is_method_handle_intrinsic() ||
           method->has_native_function(), "must have something valid to call!");
  
!   if (CriticalJNINatives && !method->is_method_handle_intrinsic()) {
      // We perform the I/O with transition to native before acquiring AdapterHandlerLibrary_lock.
      critical_entry = NativeLookup::lookup_critical_entry(method);
    }
  
    {
--- 3025,14 ---
    ResourceMark rm;
    nmethod* nm = NULL;
    address critical_entry = NULL;
  
    assert(method->is_native(), "must be native");
!   assert(method->is_special_native_intrinsic() ||
           method->has_native_function(), "must have something valid to call!");
  
!   if (CriticalJNINatives && !method->is_special_native_intrinsic()) {
      // We perform the I/O with transition to native before acquiring AdapterHandlerLibrary_lock.
      critical_entry = NativeLookup::lookup_critical_entry(method);
    }
  
    {

*** 3029,18 ***
--- 3049,25 ---
  
      ResourceMark rm;
      BufferBlob*  buf = buffer_blob(); // the temporary code buffer in CodeCache
      if (buf != NULL) {
        CodeBuffer buffer(buf);
+ 
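+       // Reserve space in the stubs section for the continuation-enter intrinsic's
+       // native wrapper.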
+       if (method->is_continuation_enter_intrinsic()) {
+         buffer.initialize_stubs_size(64);
+       }
+ 
        struct { double data[20]; } locs_buf;
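+       // Shared relocation space for the stubs section, initialized below.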
+       struct { double data[20]; } stubs_locs_buf;
        buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
  #if defined(AARCH64)
        // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
        // in the constant pool to ensure ordering between the barrier and oops
        // accesses. For native_wrappers we need a constant.
        buffer.initialize_consts_size(8);
  #endif
+       buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
        MacroAssembler _masm(&buffer);
  
        // Fill in the signature array, for the calling-convention call.
        const int total_args_passed = method->size_of_parameters();
  

*** 3233,10 ***
--- 3260,16 ---
        buf[i++] = cast_from_oop<intptr_t>(kptr2->obj());
      }
    }
    assert(i - max_locals == active_monitor_count*2, "found the expected number of monitors");
  
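+   // An interpreted caller forces the continuation freeze slow path; record its sp.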
+   RegisterMap map(current, false);
+   frame sender = fr.sender(&map);
+   if (sender.is_interpreted_frame()) {
+     current->push_cont_fastpath(sender.sp());
+   }
+ 
    return buf;
  JRT_END
  
  JRT_LEAF(void, SharedRuntime::OSR_migration_end( intptr_t* buf) )
    FREE_C_HEAP_ARRAY(intptr_t, buf);

*** 3301,11 ***
    CompiledMethod* nm = NULL;
    int count = 1;
  
    assert(fr.is_java_frame(), "Must start on Java frame");
  
!   while (true) {
      Method* method = NULL;
      bool found = false;
      if (fr.is_interpreted_frame()) {
        method = fr.interpreter_frame_method();
        if (method != NULL && method->has_reserved_stack_access()) {
--- 3334,15 ---
    CompiledMethod* nm = NULL;
    int count = 1;
  
    assert(fr.is_java_frame(), "Must start on Java frame");
  
!   RegisterMap map(JavaThread::current(), false, false); // don't walk continuations
+   for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
+     if (!fr.is_java_frame()) {
+       continue;
+     }
+ 
      Method* method = NULL;
      bool found = false;
      if (fr.is_interpreted_frame()) {
        method = fr.interpreter_frame_method();
        if (method != NULL && method->has_reserved_stack_access()) {

*** 3335,15 ***
        if (event.should_commit()) {
          event.set_method(method);
          event.commit();
        }
      }
-     if (fr.is_first_java_frame()) {
-       break;
-     } else {
-       fr = fr.java_sender();
-     }
    }
    return activation;
  }
  
  void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
--- 3372,10 ---