/*
 * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "compiler/compilationPolicy.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/disassembler.hpp"
#include "gc/shared/barrierSetNMethod.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "interpreter/bytecodeTracer.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "interpreter/linkResolver.hpp"
#include "interpreter/templateTable.hpp"
#include "jvm_io.h"
#include "logging/log.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/constantPool.inline.hpp"
#include "oops/cpCache.inline.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/klass.inline.hpp"
#include "oops/methodData.hpp"
#include "oops/method.inline.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "prims/nativeLookup.hpp"
#include "runtime/atomic.hpp"
#include "runtime/continuation.hpp"
#include "runtime/deoptimization.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/icache.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/java.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/jfieldIDWorkaround.hpp"
#include "runtime/osThread.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stackWatermarkSet.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/synchronizer.inline.hpp"
#include "runtime/threadCritical.hpp"
#include "utilities/align.hpp"
#include "utilities/checkedCast.hpp"
#include "utilities/copy.hpp"
#include "utilities/events.hpp"

// Helper class to access current interpreter state
class LastFrameAccessor : public StackObj {
  frame _last_frame;
public:
  LastFrameAccessor(JavaThread* current) {
    assert(current == Thread::current(), "sanity");
    _last_frame = current->last_frame();
  }
  bool is_interpreted_frame() const {
    return _last_frame.is_interpreted_frame();
  }
  Method*  method() const       { return _last_frame.interpreter_frame_method(); }
  address  bcp() const          { return _last_frame.interpreter_frame_bcp(); }
  int      bci() const          { return _last_frame.interpreter_frame_bci(); }
  address  mdp() const          { return _last_frame.interpreter_frame_mdp(); }

  void     set_bcp(address bcp) { _last_frame.interpreter_frame_set_bcp(bcp); }
  void     set_mdp(address dp)  { _last_frame.interpreter_frame_set_mdp(dp); }

  // pass method to avoid calling unsafe bcp_to_method (partial fix 4926272)
  Bytecodes::Code code() const  { return Bytecodes::code_at(method(), bcp()); }

  Bytecode bytecode() const     { return Bytecode(method(), bcp()); }
  int get_index_u1(Bytecodes::Code bc) const { return bytecode().get_index_u1(bc); }
  int get_index_u2(Bytecodes::Code bc) const { return bytecode().get_index_u2(bc); }
  int get_index_u4(Bytecodes::Code bc) const { return bytecode().get_index_u4(bc); }
  int number_of_dimensions() const           { return bcp()[3]; }

  oop callee_receiver(Symbol* signature) {
    return _last_frame.interpreter_callee_receiver(signature);
  }
  BasicObjectLock* monitor_begin() const {
    return _last_frame.interpreter_frame_monitor_begin();
  }
  BasicObjectLock* monitor_end() const {
    return _last_frame.interpreter_frame_monitor_end();
  }
  BasicObjectLock* next_monitor(BasicObjectLock* current) const {
    return _last_frame.next_monitor_in_interpreter_frame(current);
  }

  frame& get_frame() { return _last_frame; }
};

//------------------------------------------------------------------------------------------------------------------------
// State accessors

void InterpreterRuntime::set_bcp_and_mdp(address bcp, JavaThread* current) {
  LastFrameAccessor last_frame(current);
  last_frame.set_bcp(bcp);
  if (ProfileInterpreter) {
    // ProfileTraps uses MDOs independently of ProfileInterpreter.
    // That is why we must check both ProfileInterpreter and mdo != nullptr.
    MethodData* mdo = last_frame.method()->method_data();
    if (mdo != nullptr) {
      NEEDS_CLEANUP;
      last_frame.set_mdp(mdo->bci_to_dp(last_frame.bci()));
    }
  }
}

//------------------------------------------------------------------------------------------------------------------------
// Constants


JRT_ENTRY(void, InterpreterRuntime::ldc(JavaThread* current, bool wide))
  // access constant pool
  LastFrameAccessor last_frame(current);
  ConstantPool* pool = last_frame.method()->constants();
  int cp_index = wide ?
                  last_frame.get_index_u2(Bytecodes::_ldc_w) : last_frame.get_index_u1(Bytecodes::_ldc);
  constantTag tag = pool->tag_at(cp_index);

  assert (tag.is_unresolved_klass() || tag.is_klass(), "wrong ldc call");
  Klass* klass = pool->klass_at(cp_index, CHECK);
  oop java_class = klass->java_mirror();
  current->set_vm_result(java_class);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::resolve_ldc(JavaThread* current, Bytecodes::Code bytecode)) {
  assert(bytecode == Bytecodes::_ldc ||
         bytecode == Bytecodes::_ldc_w ||
         bytecode == Bytecodes::_ldc2_w ||
         bytecode == Bytecodes::_fast_aldc ||
         bytecode == Bytecodes::_fast_aldc_w, "wrong bc");
  ResourceMark rm(current);
  const bool is_fast_aldc = (bytecode == Bytecodes::_fast_aldc ||
                             bytecode == Bytecodes::_fast_aldc_w);
  LastFrameAccessor last_frame(current);
  methodHandle m (current, last_frame.method());
  Bytecode_loadconstant ldc(m, last_frame.bci());

  // Double-check the size. (Condy can have any type.)
  BasicType type = ldc.result_type();
  switch (type2size[type]) {
  case 2: guarantee(bytecode == Bytecodes::_ldc2_w, ""); break;
  case 1: guarantee(bytecode != Bytecodes::_ldc2_w, ""); break;
  default: ShouldNotReachHere();
  }

  // Resolve the constant. This does not do unboxing.
  // But it does replace Universe::the_null_sentinel by null.
  oop result = ldc.resolve_constant(CHECK);
  assert(result != nullptr || is_fast_aldc, "null result only valid for fast_aldc");

#ifdef ASSERT
  {
    // The bytecode wrappers aren't GC-safe so construct a new one
    Bytecode_loadconstant ldc2(m, last_frame.bci());
    int rindex = ldc2.cache_index();
    if (rindex < 0)
      rindex = m->constants()->cp_to_object_index(ldc2.pool_index());
    if (rindex >= 0) {
      oop coop = m->constants()->resolved_reference_at(rindex);
      oop roop = (result == nullptr ? Universe::the_null_sentinel() : result);
      assert(roop == coop, "expected result for assembly code");
    }
  }
#endif
  current->set_vm_result(result);
  if (!is_fast_aldc) {
    // Tell the interpreter how to unbox the primitive.
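    // The flags word handed back in vm_result_2 below packs the constant's
    // TosState into the upper bits (tos_state_shift) and the byte offset of
    // the box object's value field into the lower bits (field_index_mask),
    // so the interpreter knows both which kind of load to emit and where the
    // primitive value lives inside the box.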
    guarantee(java_lang_boxing_object::is_instance(result, type), "");
    int offset = java_lang_boxing_object::value_offset(type);
    intptr_t flags = ((as_TosState(type) << ConstantPoolCache::tos_state_shift)
                      | (offset & ConstantPoolCache::field_index_mask));
    current->set_vm_result_2((Metadata*)flags);
  }
}
JRT_END


//------------------------------------------------------------------------------------------------------------------------
// Allocation

JRT_ENTRY(void, InterpreterRuntime::_new(JavaThread* current, ConstantPool* pool, int index))
  Klass* k = pool->klass_at(index, CHECK);
  InstanceKlass* klass = InstanceKlass::cast(k);

  // Make sure we are not instantiating an abstract klass
  klass->check_valid_for_instantiation(true, CHECK);

  // Make sure klass is initialized
  klass->initialize(CHECK);

  oop obj = klass->allocate_instance(CHECK);
  current->set_vm_result(obj);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::newarray(JavaThread* current, BasicType type, jint size))
  oop obj = oopFactory::new_typeArray(type, size, CHECK);
  current->set_vm_result(obj);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::anewarray(JavaThread* current, ConstantPool* pool, int index, jint size))
  Klass* klass = pool->klass_at(index, CHECK);
  objArrayOop obj = oopFactory::new_objArray(klass, size, CHECK);
  current->set_vm_result(obj);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::multianewarray(JavaThread* current, jint* first_size_address))
  // We may want to pass in more arguments - could make this slightly faster
  LastFrameAccessor last_frame(current);
  ConstantPool* constants = last_frame.method()->constants();
  int i = last_frame.get_index_u2(Bytecodes::_multianewarray);
  Klass* klass = constants->klass_at(i, CHECK);
  int nof_dims = last_frame.number_of_dimensions();
  assert(klass->is_klass(), "not a class");
  assert(nof_dims >= 1, "multianewarray rank must be nonzero");

  // We must create an array of jints to pass to multi_allocate.
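  // The dimension sizes live in interpreter stack slots, which are a full
  // machine word each, so on 64-bit VMs the jint values are not packed back
  // to back; local_offset_in_bytes maps a slot index to its byte offset and
  // the loop below copies the sizes into the contiguous jint array that
  // multi_allocate expects.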
  ResourceMark rm(current);
  const int small_dims = 10;
  jint dim_array[small_dims];
  jint *dims = &dim_array[0];
  if (nof_dims > small_dims) {
    dims = (jint*) NEW_RESOURCE_ARRAY(jint, nof_dims);
  }
  for (int index = 0; index < nof_dims; index++) {
    // offset from first_size_address is addressed as local[index]
    int n = Interpreter::local_offset_in_bytes(index)/jintSize;
    dims[index] = first_size_address[n];
  }
  oop obj = ArrayKlass::cast(klass)->multi_allocate(nof_dims, dims, CHECK);
  current->set_vm_result(obj);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::register_finalizer(JavaThread* current, oopDesc* obj))
  assert(oopDesc::is_oop(obj), "must be a valid oop");
  assert(obj->klass()->has_finalizer(), "shouldn't be here otherwise");
  InstanceKlass::register_finalizer(instanceOop(obj), CHECK);
JRT_END


// Quicken instance-of and check-cast bytecodes
JRT_ENTRY(void, InterpreterRuntime::quicken_io_cc(JavaThread* current))
  // Force resolving; quicken the bytecode
  LastFrameAccessor last_frame(current);
  int which = last_frame.get_index_u2(Bytecodes::_checkcast);
  ConstantPool* cpool = last_frame.method()->constants();
  // We'd expect to assert that we're only here to quicken bytecodes, but in a multithreaded
  // program we might have seen an unquick'd bytecode in the interpreter but have another
  // thread quicken the bytecode before we get here.
  // assert( cpool->tag_at(which).is_unresolved_klass(), "should only come here to quicken bytecodes" );
  Klass* klass = cpool->klass_at(which, CHECK);
  current->set_vm_result_2(klass);
JRT_END


//------------------------------------------------------------------------------------------------------------------------
// Exceptions

void InterpreterRuntime::note_trap_inner(JavaThread* current, int reason,
                                         const methodHandle& trap_method, int trap_bci) {
  if (trap_method.not_null()) {
    MethodData* trap_mdo = trap_method->method_data();
    if (trap_mdo == nullptr) {
      ExceptionMark em(current);
      JavaThread* THREAD = current; // For exception macros.
      Method::build_profiling_method_data(trap_method, THREAD);
      if (HAS_PENDING_EXCEPTION) {
        // Only metaspace OOM is expected. No Java code executed.
        assert((PENDING_EXCEPTION->is_a(vmClasses::OutOfMemoryError_klass())),
               "we expect only an OOM error here");
        CLEAR_PENDING_EXCEPTION;
      }
      trap_mdo = trap_method->method_data();
      // and fall through...
    }
    if (trap_mdo != nullptr) {
      // Update per-method count of trap events. The interpreter
      // is updating the MDO to simulate the effect of compiler traps.
      Deoptimization::update_method_data_from_interpreter(trap_mdo, trap_bci, reason);
    }
  }
}

// Assume the compiler is (or will be) interested in this event.
// If necessary, create an MDO to hold the information, and record it.
void InterpreterRuntime::note_trap(JavaThread* current, int reason) {
  assert(ProfileTraps, "call me only if profiling");
  LastFrameAccessor last_frame(current);
  methodHandle trap_method(current, last_frame.method());
  int trap_bci = trap_method->bci_from(last_frame.bcp());
  note_trap_inner(current, reason, trap_method, trap_bci);
}

static Handle get_preinitialized_exception(Klass* k, TRAPS) {
  // get klass
  InstanceKlass* klass = InstanceKlass::cast(k);
  assert(klass->is_initialized(),
         "this klass should have been initialized during VM initialization");
  // create instance - do not call constructor since we may have no
  // (java) stack space left (should assert constructor is empty)
  Handle exception;
  oop exception_oop = klass->allocate_instance(CHECK_(exception));
  exception = Handle(THREAD, exception_oop);
  if (StackTraceInThrowable) {
    java_lang_Throwable::fill_in_stack_trace(exception);
  }
  return exception;
}

// Special handling for stack overflow: since we don't have any (java) stack
// space left we use the pre-allocated & pre-initialized StackOverflowError
// klass to create a stack overflow error instance. We do not call its
// constructor for the same reason (it is empty, anyway).
JRT_ENTRY(void, InterpreterRuntime::throw_StackOverflowError(JavaThread* current))
  Handle exception = get_preinitialized_exception(
                                 vmClasses::StackOverflowError_klass(),
                                 CHECK);
  // Increment counter for hs_err file reporting
  Atomic::inc(&Exceptions::_stack_overflow_errors);
  // Remove the ScopedValue bindings in case we got a StackOverflowError
  // while we were trying to manipulate ScopedValue bindings.
  current->clear_scopedValueBindings();
  THROW_HANDLE(exception);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_delayed_StackOverflowError(JavaThread* current))
  Handle exception = get_preinitialized_exception(
                                 vmClasses::StackOverflowError_klass(),
                                 CHECK);
  java_lang_Throwable::set_message(exception(),
                                   Universe::delayed_stack_overflow_error_message());
  // Increment counter for hs_err file reporting
  Atomic::inc(&Exceptions::_stack_overflow_errors);
  // Remove the ScopedValue bindings in case we got a StackOverflowError
  // while we were trying to manipulate ScopedValue bindings.
  current->clear_scopedValueBindings();
  THROW_HANDLE(exception);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::create_exception(JavaThread* current, char* name, char* message))
  // lookup exception klass
  TempNewSymbol s = SymbolTable::new_symbol(name);
  if (ProfileTraps) {
    if (s == vmSymbols::java_lang_ArithmeticException()) {
      note_trap(current, Deoptimization::Reason_div0_check);
    } else if (s == vmSymbols::java_lang_NullPointerException()) {
      note_trap(current, Deoptimization::Reason_null_check);
    }
  }
  // create exception
  Handle exception = Exceptions::new_exception(current, s, message);
  current->set_vm_result(exception());
JRT_END


JRT_ENTRY(void, InterpreterRuntime::create_klass_exception(JavaThread* current, char* name, oopDesc* obj))
  // Produce the error message first because note_trap can safepoint
  ResourceMark rm(current);
  const char* klass_name = obj->klass()->external_name();
  // lookup exception klass
  TempNewSymbol s = SymbolTable::new_symbol(name);
  if (ProfileTraps) {
    if (s == vmSymbols::java_lang_ArrayStoreException()) {
      note_trap(current, Deoptimization::Reason_array_check);
    } else {
      note_trap(current, Deoptimization::Reason_class_check);
    }
  }
  // create exception, with klass name as detail message
  Handle exception = Exceptions::new_exception(current, s, klass_name);
  current->set_vm_result(exception());
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_ArrayIndexOutOfBoundsException(JavaThread* current, arrayOopDesc* a, jint index))
  // Produce the error message first because note_trap can safepoint
  ResourceMark rm(current);
  stringStream ss;
  ss.print("Index %d out of bounds for length %d", index, a->length());

  if (ProfileTraps) {
    note_trap(current, Deoptimization::Reason_range_check);
  }

  THROW_MSG(vmSymbols::java_lang_ArrayIndexOutOfBoundsException(), ss.as_string());
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_ClassCastException(
  JavaThread* current, oopDesc* obj))

  // Produce the error message first because note_trap can safepoint
  ResourceMark rm(current);
  char* message = SharedRuntime::generate_class_cast_message(
    current, obj->klass());

  if (ProfileTraps) {
    note_trap(current, Deoptimization::Reason_class_check);
  }

  // create exception
  THROW_MSG(vmSymbols::java_lang_ClassCastException(), message);
JRT_END

// exception_handler_for_exception(...) returns the continuation address,
// the exception oop (via TLS) and sets the bci/bcp for the continuation.
// The exception oop is returned to make sure it is preserved over GC (it
// is only on the stack if the exception was thrown explicitly via athrow).
// During this operation, the expression stack contains the values for the
// bci where the exception happened. If the exception was propagated back
// from a call, the expression stack contains the values for the bci at the
// invoke w/o arguments (i.e., as if one were inside the call).
// Note that the implementation of this method assumes it's only called when an exception has actually occurred.
JRT_ENTRY(address, InterpreterRuntime::exception_handler_for_exception(JavaThread* current, oopDesc* exception))
  // We get here after we have unwound from a callee throwing an exception
  // into the interpreter.
  // Any deferred stack processing is notified of
  // the event via the StackWatermarkSet.
  StackWatermarkSet::after_unwind(current);

  LastFrameAccessor last_frame(current);
  Handle             h_exception(current, exception);
  methodHandle       h_method   (current, last_frame.method());
  constantPoolHandle h_constants(current, h_method->constants());
  bool               should_repeat;
  int                handler_bci;
  int                current_bci = last_frame.bci();

  if (current->frames_to_pop_failed_realloc() > 0) {
    // Allocation of scalar replaced object used in this frame
    // failed. Unconditionally pop the frame.
    current->dec_frames_to_pop_failed_realloc();
    current->set_vm_result(h_exception());
    // If the method is synchronized we already unlocked the monitor
    // during deoptimization so the interpreter needs to skip it when
    // the frame is popped.
    current->set_do_not_unlock_if_synchronized(true);
    return Interpreter::remove_activation_entry();
  }

  // Need to do this check first since when _do_not_unlock_if_synchronized
  // is set, we don't want to trigger any classloading which may make calls
  // into java, or surprisingly find a matching exception handler for bci 0
  // since at this moment the method hasn't been "officially" entered yet.
  if (current->do_not_unlock_if_synchronized()) {
    ResourceMark rm;
    assert(current_bci == 0, "bci isn't zero for do_not_unlock_if_synchronized");
    current->set_vm_result(exception);
    return Interpreter::remove_activation_entry();
  }

  do {
    should_repeat = false;

    // assertions
    assert(h_exception.not_null(), "null exceptions should be handled by athrow");
    // Check that exception is a subclass of Throwable.
    assert(h_exception->is_a(vmClasses::Throwable_klass()),
           "Exception not subclass of Throwable");

    // tracing
    if (log_is_enabled(Info, exceptions)) {
      ResourceMark rm(current);
      stringStream tempst;
      tempst.print("interpreter method <%s>\n"
                   " at bci %d for thread " INTPTR_FORMAT " (%s)",
                   h_method->print_value_string(), current_bci, p2i(current), current->name());
      Exceptions::log_exception(h_exception, tempst.as_string());
    }
    // Don't go paging in something which won't be used.
    //     else if (extable->length() == 0) {
    //       // disabled for now - interpreter is not using shortcut yet
    //       // (shortcut is not to call runtime if we have no exception handlers)
    //       // warning("performance bug: should not call runtime if method has no exception handlers");
    //     }
    // for AbortVMOnException flag
    Exceptions::debug_check_abort(h_exception);

    // exception handler lookup
    Klass* klass = h_exception->klass();
    handler_bci = Method::fast_exception_handler_bci_for(h_method, klass, current_bci, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      // We threw an exception while trying to find the exception handler.
      // Transfer the new exception to the exception handle which will
      // be set into thread local storage, and do another lookup for an
      // exception handler for this exception, this time starting at the
      // BCI of the exception handler which caused the exception to be
      // thrown (bug 4307310).
      h_exception = Handle(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      if (handler_bci >= 0) {
        current_bci = handler_bci;
        should_repeat = true;
      }
    }
  } while (should_repeat == true);

#if INCLUDE_JVMCI
  if (EnableJVMCI && h_method->method_data() != nullptr) {
    ResourceMark rm(current);
    MethodData* mdo = h_method->method_data();

    // Lock to read ProfileData, and ensure lock is not broken by a safepoint
    MutexLocker ml(mdo->extra_data_lock(), Mutex::_no_safepoint_check_flag);

    ProfileData* pdata = mdo->allocate_bci_to_data(current_bci, nullptr);
    if (pdata != nullptr && pdata->is_BitData()) {
      BitData* bit_data = (BitData*) pdata;
      bit_data->set_exception_seen();
    }
  }
#endif

  // notify JVMTI of an exception throw; JVMTI will detect if this is a first
  // time throw or a stack unwinding throw and accordingly notify the debugger
  if (JvmtiExport::can_post_on_exceptions()) {
    JvmtiExport::post_exception_throw(current, h_method(), last_frame.bcp(), h_exception());
  }

  address continuation = nullptr;
  address handler_pc = nullptr;
  if (handler_bci < 0 || !current->stack_overflow_state()->reguard_stack((address) &continuation)) {
    // Forward exception to callee (leaving bci/bcp untouched) because (a) no
    // handler in this method, or (b) after a stack overflow there is not yet
    // enough stack space available to reprotect the stack.
    continuation = Interpreter::remove_activation_entry();
#if COMPILER2_OR_JVMCI
    // Count this for compilation purposes
    h_method->interpreter_throwout_increment(THREAD);
#endif
  } else {
    // handler in this method => change bci/bcp to handler bci/bcp and continue there
    handler_pc = h_method->code_base() + handler_bci;
    h_method->set_exception_handler_entered(handler_bci); // profiling
#ifndef ZERO
    set_bcp_and_mdp(handler_pc, current);
    continuation = Interpreter::dispatch_table(vtos)[*handler_pc];
#else
    continuation = (address)(intptr_t) handler_bci;
#endif
  }

  // notify debugger of an exception catch
  // (this is good for exceptions caught in native methods as well)
  if (JvmtiExport::can_post_on_exceptions()) {
    JvmtiExport::notice_unwind_due_to_exception(current, h_method(), handler_pc, h_exception(), (handler_pc != nullptr));
  }

  current->set_vm_result(h_exception());
  return continuation;
JRT_END


JRT_ENTRY(void, InterpreterRuntime::throw_pending_exception(JavaThread* current))
  assert(current->has_pending_exception(), "must only be called if there's an exception pending");
  // nothing to do - eventually we should remove this code entirely (see comments @ call sites)
JRT_END


JRT_ENTRY(void, InterpreterRuntime::throw_AbstractMethodError(JavaThread* current))
  THROW(vmSymbols::java_lang_AbstractMethodError());
JRT_END

// This method is called from the "abstract_entry" of the interpreter.
// At that point, the arguments have already been removed from the stack
// and therefore we don't have the receiver object at our fingertips. (Though,
// on some platforms the receiver still resides in a register...). Thus,
// we have no choice but to print an error message not containing the receiver
// type.
JRT_ENTRY(void, InterpreterRuntime::throw_AbstractMethodErrorWithMethod(JavaThread* current,
                                                                        Method* missingMethod))
  ResourceMark rm(current);
  assert(missingMethod != nullptr, "sanity");
  methodHandle m(current, missingMethod);
  LinkResolver::throw_abstract_method_error(m, THREAD);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_AbstractMethodErrorVerbose(JavaThread* current,
                                                                     Klass* recvKlass,
                                                                     Method* missingMethod))
  ResourceMark rm(current);
  methodHandle mh = methodHandle(current, missingMethod);
  LinkResolver::throw_abstract_method_error(mh, recvKlass, THREAD);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::throw_IncompatibleClassChangeError(JavaThread* current))
  THROW(vmSymbols::java_lang_IncompatibleClassChangeError());
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_IncompatibleClassChangeErrorVerbose(JavaThread* current,
                                                                              Klass* recvKlass,
                                                                              Klass* interfaceKlass))
  ResourceMark rm(current);
  char buf[1000];
  buf[0] = '\0';
  jio_snprintf(buf, sizeof(buf),
               "Class %s does not implement the requested interface %s",
               recvKlass ? recvKlass->external_name() : "nullptr",
               interfaceKlass ? interfaceKlass->external_name() : "nullptr");
  THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(), buf);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_NullPointerException(JavaThread* current))
  THROW(vmSymbols::java_lang_NullPointerException());
JRT_END

//------------------------------------------------------------------------------------------------------------------------
// Fields
//

void InterpreterRuntime::resolve_get_put(JavaThread* current, Bytecodes::Code bytecode) {
  LastFrameAccessor last_frame(current);
  constantPoolHandle pool(current, last_frame.method()->constants());
  methodHandle m(current, last_frame.method());

  resolve_get_put(bytecode, last_frame.get_index_u2(bytecode), m, pool, true /*initialize_holder*/, current);
}

void InterpreterRuntime::resolve_get_put(Bytecodes::Code bytecode, int field_index,
                                         methodHandle& m,
                                         constantPoolHandle& pool,
                                         bool initialize_holder, TRAPS) {
  fieldDescriptor info;
  bool is_put    = (bytecode == Bytecodes::_putfield  || bytecode == Bytecodes::_nofast_putfield ||
                    bytecode == Bytecodes::_putstatic);
  bool is_static = (bytecode == Bytecodes::_getstatic || bytecode == Bytecodes::_putstatic);

  {
    JvmtiHideSingleStepping jhss(THREAD);
    LinkResolver::resolve_field_access(info, pool, field_index,
                                       m, bytecode, initialize_holder, CHECK);
  } // end JvmtiHideSingleStepping

  // check if link resolution caused cpCache to be updated
  if (pool->resolved_field_entry_at(field_index)->is_resolved(bytecode)) return;

  // compute auxiliary field attributes
  TosState state = as_TosState(info.field_type());

  // Resolution of put instructions on final fields is delayed. That is required so that
  // exceptions are thrown at the correct place (when the instruction is actually invoked).
  // If we do not resolve an instruction in the current pass, leaving the put_code
  // set to zero will cause the next put instruction to the same field to reresolve.

  // Resolution of put instructions to final instance fields with invalid updates (i.e.,
  // to final instance fields with updates originating from a method different than <init>)
  // is inhibited.
  // A putfield instruction targeting an instance final field must throw
  // an IllegalAccessError if the instruction is not in an instance
  // initializer method <init>. If resolution were not inhibited, a putfield
  // in an initializer method could be resolved in the initializer. Subsequent
  // putfield instructions to the same field would then use cached information.
  // As a result, those instructions would not pass through the VM. That is,
  // checks in resolve_field_access() would not be executed for those instructions
  // and the required IllegalAccessError would not be thrown.
  //
  // Also, we need to delay resolving getstatic and putstatic instructions until the
  // class is initialized. This is required so that access to the static
  // field will call the initialization function every time until the class
  // is completely initialized, as required by section 2.17.5 of the JVM Specification.
  InstanceKlass* klass = info.field_holder();
  bool uninitialized_static = is_static && !klass->is_initialized();
  bool has_initialized_final_update = info.field_holder()->major_version() >= 53 &&
                                      info.has_initialized_final_update();
  assert(!(has_initialized_final_update && !info.access_flags().is_final()), "Fields with initialized final updates must be final");

  Bytecodes::Code get_code = (Bytecodes::Code)0;
  Bytecodes::Code put_code = (Bytecodes::Code)0;
  if (!uninitialized_static) {
    get_code = ((is_static) ? Bytecodes::_getstatic : Bytecodes::_getfield);
    if ((is_put && !has_initialized_final_update) || !info.access_flags().is_final()) {
      put_code = ((is_static) ? Bytecodes::_putstatic : Bytecodes::_putfield);
    }
  }

  ResolvedFieldEntry* entry = pool->resolved_field_entry_at(field_index);
  entry->set_flags(info.access_flags().is_final(), info.access_flags().is_volatile());
  entry->fill_in(info.field_holder(), info.offset(),
                 checked_cast<u2>(info.index()), checked_cast<u1>(state),
                 static_cast<u1>(get_code), static_cast<u1>(put_code));
}


//------------------------------------------------------------------------------------------------------------------------
// Synchronization
//
// The interpreter's synchronization code is factored out so that it can
// be shared by method invocation and synchronized blocks.
//%note synchronization_3

//%note monitor_1
JRT_ENTRY_NO_ASYNC(void, InterpreterRuntime::monitorenter(JavaThread* current, BasicObjectLock* elem))
#ifdef ASSERT
  current->last_frame().interpreter_frame_verify_monitor(elem);
#endif
  Handle h_obj(current, elem->obj());
  assert(Universe::heap()->is_in_or_null(h_obj()),
         "must be null or an object");
  ObjectSynchronizer::enter(h_obj, elem->lock(), current);
  assert(Universe::heap()->is_in_or_null(elem->obj()),
         "must be null or an object");
#ifdef ASSERT
  if (!current->preempting()) current->last_frame().interpreter_frame_verify_monitor(elem);
#endif
JRT_END

JRT_LEAF(void, InterpreterRuntime::monitorexit(BasicObjectLock* elem))
  oop obj = elem->obj();
  assert(Universe::heap()->is_in(obj), "must be an object");
  // The object could become unlocked through a JNI call, which we have no other checks for.
  // Give a fatal message if CheckJNICalls. Otherwise we ignore it.
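  // Note: monitorexit is a JRT_LEAF, so there is no thread-state transition
  // and no safepoint check on return, which is why it is safe to operate on
  // the raw oop here without a Handle.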
  if (obj->is_unlocked()) {
    if (CheckJNICalls) {
      fatal("Object has been unlocked by JNI");
    }
    return;
  }
  ObjectSynchronizer::exit(obj, elem->lock(), JavaThread::current());
  // Free entry. If it is not cleared, the exception handling code will try to unlock the monitor
  // again at method exit or in the case of an exception.
  elem->set_obj(nullptr);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::throw_illegal_monitor_state_exception(JavaThread* current))
  THROW(vmSymbols::java_lang_IllegalMonitorStateException());
JRT_END


JRT_ENTRY(void, InterpreterRuntime::new_illegal_monitor_state_exception(JavaThread* current))
  // Returns an illegal exception to install into the current thread. The
  // pending_exception flag is cleared so normal exception handling does not
  // trigger. Any current installed exception will be overwritten. This
  // method will be called during an exception unwind.

  assert(!HAS_PENDING_EXCEPTION, "no pending exception");
  Handle exception(current, current->vm_result());
  assert(exception() != nullptr, "vm result should be set");
  current->set_vm_result(nullptr); // clear vm result before continuing (may cause memory leaks and assert failures)
  exception = get_preinitialized_exception(vmClasses::IllegalMonitorStateException_klass(), CATCH);
  current->set_vm_result(exception());
JRT_END


//------------------------------------------------------------------------------------------------------------------------
// Invokes

JRT_ENTRY(Bytecodes::Code, InterpreterRuntime::get_original_bytecode_at(JavaThread* current, Method* method, address bcp))
  return method->orig_bytecode_at(method->bci_from(bcp));
JRT_END

JRT_ENTRY(void, InterpreterRuntime::set_original_bytecode_at(JavaThread* current, Method* method, address bcp, Bytecodes::Code new_code))
  method->set_orig_bytecode_at(method->bci_from(bcp), new_code);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::_breakpoint(JavaThread* current, Method* method, address bcp))
  JvmtiExport::post_raw_breakpoint(current, method, bcp);
JRT_END

void InterpreterRuntime::resolve_invoke(JavaThread* current, Bytecodes::Code bytecode) {
  LastFrameAccessor last_frame(current);
  // extract receiver from the outgoing argument list if necessary
  Handle receiver(current, nullptr);
  if (bytecode == Bytecodes::_invokevirtual || bytecode == Bytecodes::_invokeinterface ||
      bytecode == Bytecodes::_invokespecial) {
    ResourceMark rm(current);
    methodHandle m (current, last_frame.method());
    Bytecode_invoke call(m, last_frame.bci());
    Symbol* signature = call.signature();
    receiver = Handle(current, last_frame.callee_receiver(signature));

    assert(Universe::heap()->is_in_or_null(receiver()),
           "sanity check");
    assert(receiver.is_null() ||
           !Universe::heap()->is_in(receiver->klass()),
           "sanity check");
  }

  // resolve method
  CallInfo info;
  constantPoolHandle pool(current, last_frame.method()->constants());

  methodHandle resolved_method;

  int method_index = last_frame.get_index_u2(bytecode);
  {
    JvmtiHideSingleStepping jhss(current);
    JavaThread* THREAD = current; // For exception macros.
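    // JvmtiHideSingleStepping keeps JVMTI single-step events from being
    // posted while resolution runs, since resolution may execute Java code
    // (class loading and initialization) that the debugger should not step into.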
    LinkResolver::resolve_invoke(info, receiver, pool,
                                 method_index, bytecode,
                                 THREAD);

    if (HAS_PENDING_EXCEPTION) {
      if (ProfileTraps && PENDING_EXCEPTION->klass()->name() == vmSymbols::java_lang_NullPointerException()) {
        // Preserve the original exception across the call to note_trap()
        PreserveExceptionMark pm(current);
        // Recording the trap will help the compiler to potentially recognize this exception as "hot"
        note_trap(current, Deoptimization::Reason_null_check);
      }
      return;
    }

    resolved_method = methodHandle(current, info.resolved_method());
  } // end JvmtiHideSingleStepping

  update_invoke_cp_cache_entry(info, bytecode, resolved_method, pool, method_index);
}

void InterpreterRuntime::update_invoke_cp_cache_entry(CallInfo& info, Bytecodes::Code bytecode,
                                                      methodHandle& resolved_method,
                                                      constantPoolHandle& pool,
                                                      int method_index) {
  // Don't allow safepoints until the method is cached.
  NoSafepointVerifier nsv;

  // check if link resolution caused cpCache to be updated
  ConstantPoolCache* cache = pool->cache();
  if (cache->resolved_method_entry_at(method_index)->is_resolved(bytecode)) return;

#ifdef ASSERT
  if (bytecode == Bytecodes::_invokeinterface) {
    if (resolved_method->method_holder() == vmClasses::Object_klass()) {
      // NOTE: THIS IS A FIX FOR A CORNER CASE in the JVM spec
      // (see also CallInfo::set_interface for details)
      assert(info.call_kind() == CallInfo::vtable_call ||
             info.call_kind() == CallInfo::direct_call, "");
      assert(resolved_method->is_final() || info.has_vtable_index(),
             "should have been set already");
    } else if (!resolved_method->has_itable_index()) {
      // Resolved something like CharSequence.toString. Use vtable not itable.
      assert(info.call_kind() != CallInfo::itable_call, "");
    } else {
      // Setup itable entry
      assert(info.call_kind() == CallInfo::itable_call, "");
      int index = resolved_method->itable_index();
      assert(info.itable_index() == index, "");
    }
  } else if (bytecode == Bytecodes::_invokespecial) {
    assert(info.call_kind() == CallInfo::direct_call, "must be direct call");
  } else {
    assert(info.call_kind() == CallInfo::direct_call ||
           info.call_kind() == CallInfo::vtable_call, "");
  }
#endif
  // Get sender and only set cpCache entry to resolved if it is not an
  // interface. The receiver for invokespecial calls within interface
  // methods must be checked for every call.
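  // (sender->is_interface() is passed to set_direct_call below, which, per the
  // comment above, keeps invokespecial entries from interface methods from
  // being marked fully resolved so the receiver check is repeated each call.)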
  InstanceKlass* sender = pool->pool_holder();

  switch (info.call_kind()) {
  case CallInfo::direct_call:
    cache->set_direct_call(bytecode, method_index, resolved_method, sender->is_interface());
    break;
  case CallInfo::vtable_call:
    cache->set_vtable_call(bytecode, method_index, resolved_method, info.vtable_index());
    break;
  case CallInfo::itable_call:
    cache->set_itable_call(
      bytecode,
      method_index,
      info.resolved_klass(),
      resolved_method,
      info.itable_index());
    break;
  default: ShouldNotReachHere();
  }
}

void InterpreterRuntime::cds_resolve_invoke(Bytecodes::Code bytecode, int method_index,
                                            constantPoolHandle& pool, TRAPS) {
  LinkInfo link_info(pool, method_index, bytecode, CHECK);

  if (!link_info.resolved_klass()->is_instance_klass() || InstanceKlass::cast(link_info.resolved_klass())->is_linked()) {
    CallInfo call_info;
    switch (bytecode) {
      case Bytecodes::_invokevirtual:   LinkResolver::cds_resolve_virtual_call  (call_info, link_info, CHECK); break;
      case Bytecodes::_invokeinterface: LinkResolver::cds_resolve_interface_call(call_info, link_info, CHECK); break;
      case Bytecodes::_invokespecial:   LinkResolver::cds_resolve_special_call  (call_info, link_info, CHECK); break;

      default: fatal("Unimplemented: %s", Bytecodes::name(bytecode));
    }
    methodHandle resolved_method(THREAD, call_info.resolved_method());
    guarantee(resolved_method->method_holder()->is_linked(), "");
    update_invoke_cp_cache_entry(call_info, bytecode, resolved_method, pool, method_index);
  } else {
    // FIXME: why is a shared class not linked yet?
    // Can't link it here since there are no guarantees it'll be prelinked on the next run.
    ResourceMark rm;
    InstanceKlass* resolved_iklass = InstanceKlass::cast(link_info.resolved_klass());
    log_info(cds, resolve)("Not resolved: class not linked: %s %s %s",
                           resolved_iklass->is_shared() ? "is_shared" : "",
                           resolved_iklass->init_state_name(),
                           resolved_iklass->external_name());
  }
}

// First time execution: Resolve symbols, create a permanent MethodType object.
void InterpreterRuntime::resolve_invokehandle(JavaThread* current) {
  const Bytecodes::Code bytecode = Bytecodes::_invokehandle;
  LastFrameAccessor last_frame(current);

  // resolve method
  CallInfo info;
  constantPoolHandle pool(current, last_frame.method()->constants());
  int method_index = last_frame.get_index_u2(bytecode);
  {
    JvmtiHideSingleStepping jhss(current);
    JavaThread* THREAD = current; // For exception macros.
    LinkResolver::resolve_invoke(info, Handle(), pool,
                                 method_index, bytecode,
                                 CHECK);
  } // end JvmtiHideSingleStepping

  pool->cache()->set_method_handle(method_index, info);
}

void InterpreterRuntime::cds_resolve_invokehandle(int raw_index,
                                                  constantPoolHandle& pool, TRAPS) {
  const Bytecodes::Code bytecode = Bytecodes::_invokehandle;
  CallInfo info;
  LinkResolver::resolve_invoke(info, Handle(), pool, raw_index, bytecode, CHECK);

  pool->cache()->set_method_handle(raw_index, info);
}

// First time execution: Resolve symbols, create a permanent CallSite object.
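// Unlike invokehandle above, which uses a two-byte constant pool cache index,
// invokedynamic carries a four-byte operand, so the index below is fetched
// with get_index_u4 and the resolved call site is recorded in the constant
// pool cache under that index via set_dynamic_call.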
void InterpreterRuntime::resolve_invokedynamic(JavaThread* current) {
  LastFrameAccessor last_frame(current);
  const Bytecodes::Code bytecode = Bytecodes::_invokedynamic;

  // resolve method
  CallInfo info;
  constantPoolHandle pool(current, last_frame.method()->constants());
  int index = last_frame.get_index_u4(bytecode);
  {
    JvmtiHideSingleStepping jhss(current);
    JavaThread* THREAD = current; // For exception macros.
    LinkResolver::resolve_invoke(info, Handle(), pool,
                                 index, bytecode, CHECK);
  } // end JvmtiHideSingleStepping

  pool->cache()->set_dynamic_call(info, index);
}

void InterpreterRuntime::cds_resolve_invokedynamic(int raw_index,
                                                   constantPoolHandle& pool, TRAPS) {
  const Bytecodes::Code bytecode = Bytecodes::_invokedynamic;
  CallInfo info;
  LinkResolver::resolve_invoke(info, Handle(), pool, raw_index, bytecode, CHECK);
  pool->cache()->set_dynamic_call(info, raw_index);
}

// This function is the interface to the assembly code. It returns the resolved
// cpCache entry. This doesn't safepoint, but the helper routines safepoint.
// This function will check for redefinition!
JRT_ENTRY(void, InterpreterRuntime::resolve_from_cache(JavaThread* current, Bytecodes::Code bytecode)) {
  switch (bytecode) {
  case Bytecodes::_getstatic:
  case Bytecodes::_putstatic:
  case Bytecodes::_getfield:
  case Bytecodes::_putfield:
    resolve_get_put(current, bytecode);
    break;
  case Bytecodes::_invokevirtual:
  case Bytecodes::_invokespecial:
  case Bytecodes::_invokestatic:
  case Bytecodes::_invokeinterface:
    resolve_invoke(current, bytecode);
    break;
  case Bytecodes::_invokehandle:
    resolve_invokehandle(current);
    break;
  case Bytecodes::_invokedynamic:
    resolve_invokedynamic(current);
    break;
  default:
    fatal("unexpected bytecode: %s", Bytecodes::name(bytecode));
    break;
  }
}
JRT_END

//------------------------------------------------------------------------------------------------------------------------
// Miscellaneous


nmethod* InterpreterRuntime::frequency_counter_overflow(JavaThread* current, address branch_bcp) {
  // Enable WXWrite: the function is called directly by interpreter.
  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXWrite, current));

  // frequency_counter_overflow_inner can throw async exception.
  nmethod* nm = frequency_counter_overflow_inner(current, branch_bcp);
  assert(branch_bcp != nullptr || nm == nullptr, "always returns null for non OSR requests");
  if (branch_bcp != nullptr && nm != nullptr) {
    // This was a successful request for an OSR nmethod. Because
    // frequency_counter_overflow_inner ends with a safepoint check,
    // nm could have been unloaded so look it up again. It's unsafe
    // to examine nm directly since it might have been freed and used
    // for something else.
    LastFrameAccessor last_frame(current);
    Method* method = last_frame.method();
    int bci = method->bci_from(last_frame.bcp());
    nm = method->lookup_osr_nmethod_for(bci, CompLevel_none, false);
    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
    if (nm != nullptr && bs_nm != nullptr) {
      // in case the transition passed a safepoint we need to barrier this again
      if (!bs_nm->nmethod_osr_entry_barrier(nm)) {
        nm = nullptr;
      }
    }
  }
  if (nm != nullptr && current->is_interp_only_mode()) {
    // Normally we never get an nm if is_interp_only_mode() is true, because
    // policy()->event has a check for this and won't compile the method when
    // true. However, it's possible for is_interp_only_mode() to become true
    // during the compilation. We don't want to return the nm in that case
    // because we want to continue to execute interpreted.
    nm = nullptr;
  }
#ifndef PRODUCT
  if (TraceOnStackReplacement) {
    if (nm != nullptr) {
      tty->print("OSR entry @ pc: " INTPTR_FORMAT ": ", p2i(nm->osr_entry()));
      nm->print();
    }
  }
#endif
  return nm;
}

JRT_ENTRY(nmethod*,
          InterpreterRuntime::frequency_counter_overflow_inner(JavaThread* current, address branch_bcp))
  // use UnlockFlagSaver to clear and restore the _do_not_unlock_if_synchronized
  // flag, in case this method triggers classloading which will call into Java.
  UnlockFlagSaver fs(current);

  LastFrameAccessor last_frame(current);
  assert(last_frame.is_interpreted_frame(), "must come from interpreter");
  methodHandle method(current, last_frame.method());
  const int branch_bci = branch_bcp != nullptr ? method->bci_from(branch_bcp) : InvocationEntryBci;
  const int bci        = branch_bcp != nullptr ?
                                                 method->bci_from(last_frame.bcp()) : InvocationEntryBci;

  nmethod* osr_nm = CompilationPolicy::event(method, method, branch_bci, bci, CompLevel_none, nullptr, CHECK_NULL);

  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (osr_nm != nullptr && bs_nm != nullptr) {
    if (!bs_nm->nmethod_osr_entry_barrier(osr_nm)) {
      osr_nm = nullptr;
    }
  }
  return osr_nm;
JRT_END

JRT_LEAF(jint, InterpreterRuntime::bcp_to_di(Method* method, address cur_bcp))
  assert(ProfileInterpreter, "must be profiling interpreter");
  int bci = method->bci_from(cur_bcp);
  MethodData* mdo = method->method_data();
  if (mdo == nullptr) return 0;
  return mdo->bci_to_di(bci);
JRT_END

#ifdef ASSERT
JRT_LEAF(void, InterpreterRuntime::verify_mdp(Method* method, address bcp, address mdp))
  assert(ProfileInterpreter, "must be profiling interpreter");

  MethodData* mdo = method->method_data();
  assert(mdo != nullptr, "must not be null");

  int bci = method->bci_from(bcp);

  address mdp2 = mdo->bci_to_dp(bci);
  if (mdp != mdp2) {
    ResourceMark rm;
    tty->print_cr("FAILED verify : actual mdp %p   expected mdp %p @ bci %d", mdp, mdp2, bci);
    int current_di = mdo->dp_to_di(mdp);
    int expected_di = mdo->dp_to_di(mdp2);
    tty->print_cr("  actual di %d   expected di %d", current_di, expected_di);
    int expected_approx_bci = mdo->data_at(expected_di)->bci();
    int approx_bci = -1;
    if (current_di >= 0) {
      approx_bci = mdo->data_at(current_di)->bci();
    }
    tty->print_cr("  actual bci is %d  expected bci %d", approx_bci, expected_approx_bci);
    mdo->print_on(tty);
    method->print_codes();
  }
  assert(mdp == mdp2, "wrong mdp");
JRT_END
#endif // ASSERT

JRT_ENTRY(void, InterpreterRuntime::update_mdp_for_ret(JavaThread* current, int return_bci))
  assert(ProfileInterpreter, "must be profiling interpreter");
  ResourceMark rm(current);
  LastFrameAccessor last_frame(current);
  assert(last_frame.is_interpreted_frame(), "must come from interpreter");
  MethodData* h_mdo = last_frame.method()->method_data();

  // Grab a lock to ensure atomic access to setting the return bci and
  // the displacement. This can block and GC, invalidating all naked oops.
  MutexLocker ml(RetData_lock);

  // ProfileData is essentially a wrapper around a derived oop, so we
  // need to take the lock before making any ProfileData structures.
  ProfileData* data = h_mdo->data_at(h_mdo->dp_to_di(last_frame.mdp()));
  guarantee(data != nullptr, "profile data must be valid");
  RetData* rdata = data->as_RetData();
  address new_mdp = rdata->fixup_ret(return_bci, h_mdo);
  last_frame.set_mdp(new_mdp);
JRT_END

JRT_ENTRY(MethodCounters*, InterpreterRuntime::build_method_counters(JavaThread* current, Method* m))
  return Method::build_method_counters(current, m);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::at_safepoint(JavaThread* current))
  // We used to need an explicit preserve_arguments here for invoke bytecodes. However,
  // stack traversal automatically takes care of preserving arguments for invoke, so
  // this is no longer needed.

  // JRT_END does an implicit safepoint check, hence we are guaranteed to block
  // if this is called during a safepoint

  if (JvmtiExport::should_post_single_step()) {
    // This function is called by the interpreter when single stepping. Such single
    // stepping could unwind a frame. Then, it is important that we process any frames
    // that we might return into.
    StackWatermarkSet::before_unwind(current);

    // We are called during regular safepoints and when the VM is
    // single stepping. If any thread is marked for single stepping,
    // then we may have JVMTI work to do.
    LastFrameAccessor last_frame(current);
    JvmtiExport::at_single_stepping_point(current, last_frame.method(), last_frame.bcp());
  }
JRT_END

JRT_LEAF(void, InterpreterRuntime::at_unwind(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  // This function is called by the interpreter when the return poll found a reason
  // to call the VM. The reason could be that we are returning into a not yet safe
  // to access frame. We handle that below.
  // Note that this path does not check for single stepping, because we do not want
  // to single step when unwinding frames for an exception being thrown. Instead,
  // such single stepping code will use the safepoint table, which will use the
  // InterpreterRuntime::at_safepoint callback.
  StackWatermarkSet::before_unwind(current);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::post_field_access(JavaThread* current, oopDesc* obj,
                                                      ResolvedFieldEntry *entry))

  // check the access_flags for the field in the klass

  InstanceKlass* ik = entry->field_holder();
  int index = entry->field_index();
  if (!ik->field_status(index).is_access_watched()) return;

  bool is_static = (obj == nullptr);
  HandleMark hm(current);

  Handle h_obj;
  if (!is_static) {
    // non-static field accessors have an object, but we need a handle
    h_obj = Handle(current, obj);
  }
  InstanceKlass* field_holder = entry->field_holder(); // HERE
  jfieldID fid = jfieldIDWorkaround::to_jfieldID(field_holder, entry->field_offset(), is_static);
  LastFrameAccessor last_frame(current);
  JvmtiExport::post_field_access(current, last_frame.method(), last_frame.bcp(), field_holder, h_obj, fid);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::post_field_modification(JavaThread* current, oopDesc* obj,
                                                            ResolvedFieldEntry *entry, jvalue *value))

  InstanceKlass* ik = entry->field_holder();

  // check the access_flags for the field in the klass
  int index = entry->field_index();
  // bail out if field modifications are not watched
  if (!ik->field_status(index).is_modification_watched()) return;

  char sig_type = '\0';

  switch((TosState)entry->tos_state()) {
  case btos: sig_type = JVM_SIGNATURE_BYTE;    break;
  case ztos: sig_type = JVM_SIGNATURE_BOOLEAN; break;
  case ctos: sig_type = JVM_SIGNATURE_CHAR;    break;
  case stos: sig_type = JVM_SIGNATURE_SHORT;   break;
  case itos: sig_type = JVM_SIGNATURE_INT;     break;
  case ftos: sig_type = JVM_SIGNATURE_FLOAT;   break;
  case atos: sig_type = JVM_SIGNATURE_CLASS;   break;
  case ltos: sig_type = JVM_SIGNATURE_LONG;    break;
  case dtos: sig_type = JVM_SIGNATURE_DOUBLE;  break;
  default:  ShouldNotReachHere(); return;
  }
  bool is_static = (obj == nullptr);

  HandleMark hm(current);
  jfieldID fid = jfieldIDWorkaround::to_jfieldID(ik, entry->field_offset(), is_static);
  jvalue fvalue;
#ifdef _LP64
  fvalue = *value;
#else
  // Long/double values are stored unaligned and also noncontiguously with
  // tagged stacks. We can't just do a simple assignment even in the non-
  // J/D cases because a C++ compiler is allowed to assume that a jvalue is
  // 8-byte aligned, and interpreter stack slots are only 4-byte aligned.
  // We assume that the two halves of longs/doubles are stored in interpreter
  // stack slots in platform-endian order.
  jlong_accessor u;
  jint* newval = (jint*)value;
  u.words[0] = newval[0];
  u.words[1] = newval[Interpreter::stackElementWords]; // skip if tag
  fvalue.j = u.long_value;
#endif // _LP64

  Handle h_obj;
  if (!is_static) {
    // non-static field accessors have an object, but we need a handle
    h_obj = Handle(current, obj);
  }

  LastFrameAccessor last_frame(current);
  JvmtiExport::post_raw_field_modification(current, last_frame.method(), last_frame.bcp(), ik, h_obj,
                                           fid, sig_type, &fvalue);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::post_method_entry(JavaThread* current))
  LastFrameAccessor last_frame(current);
  JvmtiExport::post_method_entry(current, last_frame.method(), last_frame.get_frame());
JRT_END


// This is a JRT_BLOCK_ENTRY because we have to stash away the return oop
// before transitioning to VM, and restore it after transitioning back
// to Java. The return oop at the top-of-stack is not walked by the GC.
JRT_BLOCK_ENTRY(void, InterpreterRuntime::post_method_exit(JavaThread* current))
  LastFrameAccessor last_frame(current);
  JvmtiExport::post_method_exit(current, last_frame.method(), last_frame.get_frame());
JRT_END

JRT_LEAF(int, InterpreterRuntime::interpreter_contains(address pc))
{
  return (Interpreter::contains(Continuation::get_top_return_pc_post_barrier(JavaThread::current(), pc)) ?
          1 : 0);
}
JRT_END


// Implementation of SignatureHandlerLibrary

#ifndef SHARING_FAST_NATIVE_FINGERPRINTS
// Dummy definition (else normalization method is defined in CPU
// dependent code)
uint64_t InterpreterRuntime::normalize_fast_native_fingerprint(uint64_t fingerprint) {
  return fingerprint;
}
#endif

address SignatureHandlerLibrary::set_handler_blob() {
  BufferBlob* handler_blob = BufferBlob::create("native signature handlers", blob_size);
  if (handler_blob == nullptr) {
    return nullptr;
  }
  address handler = handler_blob->code_begin();
  _handler_blob = handler_blob;
  _handler = handler;
  return handler;
}

void SignatureHandlerLibrary::initialize() {
  if (_fingerprints != nullptr) {
    return;
  }
  if (set_handler_blob() == nullptr) {
    vm_exit_out_of_memory(blob_size, OOM_MALLOC_ERROR, "native signature handlers");
  }

  BufferBlob* bb = BufferBlob::create("Signature Handler Temp Buffer",
                                      SignatureHandlerLibrary::buffer_size);
  _buffer = bb->code_begin();

  _fingerprints = new (mtCode) GrowableArray<uint64_t>(32, mtCode);
  _handlers     = new (mtCode) GrowableArray<address>(32, mtCode);
}

address SignatureHandlerLibrary::set_handler(CodeBuffer* buffer) {
  address handler = _handler;
  int insts_size = buffer->pure_insts_size();
  if (handler + insts_size > _handler_blob->code_end()) {
    // get a new handler blob
    handler = set_handler_blob();
  }
  if (handler != nullptr) {
    memcpy(handler, buffer->insts_begin(), insts_size);
    pd_set_handler(handler);
    ICache::invalidate_range(handler, insts_size);
    _handler = handler + insts_size;
  }
  return handler;
}

void SignatureHandlerLibrary::add(const methodHandle& method) {
  if (method->signature_handler() == nullptr) {
    // use slow signature handler if we can't do better
    int handler_index = -1;
    // check if we can use customized (fast) signature handler
    if (UseFastSignatureHandlers && method->size_of_parameters() <= Fingerprinter::fp_max_size_of_parameters) {
      // use customized signature handler
      MutexLocker mu(SignatureHandlerLibrary_lock);
      // make sure data structure is initialized
      initialize();
      // lookup method signature's fingerprint
      uint64_t fingerprint = Fingerprinter(method).fingerprint();
      // allow CPU dependent code to optimize the fingerprints for the fast handler
      fingerprint = InterpreterRuntime::normalize_fast_native_fingerprint(fingerprint);
      handler_index = _fingerprints->find(fingerprint);
      // create handler if necessary
      if (handler_index < 0) {
        ResourceMark rm;
        ptrdiff_t align_offset = align_up(_buffer, CodeEntryAlignment) - (address)_buffer;
        CodeBuffer buffer((address)(_buffer + align_offset),
                          checked_cast<int>(SignatureHandlerLibrary::buffer_size - align_offset));
        InterpreterRuntime::SignatureHandlerGenerator(method, &buffer).generate(fingerprint);
        // copy into code heap
        address handler = set_handler(&buffer);
        if (handler == nullptr) {
          // use slow signature handler (without memorizing it in the fingerprints)
        } else {
          // debugging support
          if (PrintSignatureHandlers && (handler != Interpreter::slow_signature_handler())) {
            ttyLocker ttyl;
            tty->cr();
            tty->print_cr("argument handler #%d for: %s %s (fingerprint = " UINT64_FORMAT ", %d bytes generated)",
                          _handlers->length(),
                          (method->is_static() ? "static" : "receiver"),
                          method->name_and_sig_as_C_string(),
                          fingerprint,
                          buffer.insts_size());
            if (buffer.insts_size() > 0) {
              Disassembler::decode(handler, handler + buffer.insts_size(), tty
                                   NOT_PRODUCT(COMMA &buffer.asm_remarks()));
            }
#ifndef PRODUCT
            address rh_begin = Interpreter::result_handler(method()->result_type());
            if (CodeCache::contains(rh_begin)) {
              // else it might be special platform dependent values
              tty->print_cr(" --- associated result handler ---");
              address rh_end = rh_begin;
              while (*(int*)rh_end != 0) {
                rh_end += sizeof(int);
              }
              Disassembler::decode(rh_begin, rh_end);
            } else {
              tty->print_cr(" associated result handler: " PTR_FORMAT, p2i(rh_begin));
            }
#endif
          }
          // add handler to library
          _fingerprints->append(fingerprint);
          _handlers->append(handler);
          // set handler index
          assert(_fingerprints->length() == _handlers->length(), "sanity check");
          handler_index = _fingerprints->length() - 1;
        }
      }
      // Set handler under SignatureHandlerLibrary_lock
      if (handler_index < 0) {
        // use generic signature handler
        method->set_signature_handler(Interpreter::slow_signature_handler());
      } else {
        // set handler
        method->set_signature_handler(_handlers->at(handler_index));
      }
    } else {
      DEBUG_ONLY(JavaThread::current()->check_possible_safepoint());
      // use generic signature handler
      method->set_signature_handler(Interpreter::slow_signature_handler());
    }
  }
#ifdef ASSERT
  int handler_index = -1;
  int fingerprint_index = -2;
  {
    // '_handlers' and '_fingerprints' are 'GrowableArray's and are NOT synchronized
    // in any way if accessed from multiple threads. To avoid races with another
    // thread which may change the arrays in the above mutex-protected block, we
    // have to protect this read access here with the same mutex as well!
    MutexLocker mu(SignatureHandlerLibrary_lock);
    if (_handlers != nullptr) {
      handler_index = _handlers->find(method->signature_handler());
      uint64_t fingerprint = Fingerprinter(method).fingerprint();
      fingerprint = InterpreterRuntime::normalize_fast_native_fingerprint(fingerprint);
      fingerprint_index = _fingerprints->find(fingerprint);
    }
  }
  assert(method->signature_handler() == Interpreter::slow_signature_handler() ||
         handler_index == fingerprint_index, "sanity check");
#endif // ASSERT
}

void SignatureHandlerLibrary::add(uint64_t fingerprint, address handler) {
  int handler_index = -1;
  // use customized signature handler
  MutexLocker mu(SignatureHandlerLibrary_lock);
  // make sure data structure is initialized
  initialize();
  fingerprint = InterpreterRuntime::normalize_fast_native_fingerprint(fingerprint);
  handler_index = _fingerprints->find(fingerprint);
  // create handler if necessary
  if (handler_index < 0) {
    if (PrintSignatureHandlers && (handler != Interpreter::slow_signature_handler())) {
      tty->cr();
      tty->print_cr("argument handler #%d at " PTR_FORMAT " for fingerprint " UINT64_FORMAT,
                    _handlers->length(),
                    p2i(handler),
                    fingerprint);
    }
    _fingerprints->append(fingerprint);
    _handlers->append(handler);
  } else {
    if (PrintSignatureHandlers) {
      tty->cr();
      tty->print_cr("duplicate argument handler #%d for fingerprint " UINT64_FORMAT " (old: " PTR_FORMAT ", new: " PTR_FORMAT ")",
                    _handlers->length(),
                    fingerprint,
                    p2i(_handlers->at(handler_index)),
                    p2i(handler));
    }
  }
}


BufferBlob* SignatureHandlerLibrary::_handler_blob = nullptr;
address SignatureHandlerLibrary::_handler = nullptr;
GrowableArray<uint64_t>* SignatureHandlerLibrary::_fingerprints = nullptr;
GrowableArray<address>* SignatureHandlerLibrary::_handlers = nullptr;
address SignatureHandlerLibrary::_buffer = nullptr;


JRT_ENTRY(void, InterpreterRuntime::prepare_native_call(JavaThread* current, Method* method))
  methodHandle m(current, method);
  assert(m->is_native(), "sanity check");
  // lookup native function entry point if it doesn't exist
  if (!m->has_native_function()) {
    NativeLookup::lookup(m, CHECK);
  }
  // make sure signature handler is installed
  SignatureHandlerLibrary::add(m);
  // The interpreter entry point checks the signature handler first,
  // before trying to fetch the native entry point and klass mirror.
  // We must set the signature handler last, so that multiple processors
  // preparing the same method will be sure to see non-null entry & mirror.
JRT_END

#if defined(IA32) || defined(AMD64) || defined(ARM)
JRT_LEAF(void, InterpreterRuntime::popframe_move_outgoing_args(JavaThread* current, void* src_address, void* dest_address))
  assert(current == JavaThread::current(), "pre-condition");
  if (src_address == dest_address) {
    return;
  }
  ResourceMark rm;
  LastFrameAccessor last_frame(current);
  assert(last_frame.is_interpreted_frame(), "must be an interpreted frame");
  jint bci = last_frame.bci();
  methodHandle mh(current, last_frame.method());
  Bytecode_invoke invoke(mh, bci);
  ArgumentSizeComputer asc(invoke.signature());
  int size_of_arguments = (asc.size() + (invoke.has_receiver() ? 1 : 0)); // receiver
  Copy::conjoint_jbytes(src_address, dest_address,
                        size_of_arguments * Interpreter::stackElementSize);
JRT_END
#endif // IA32 || AMD64 || ARM

#if INCLUDE_JVMTI
// This is support for the JVMTI PopFrame interface.
// Make sure it is an invokestatic of a polymorphic intrinsic that has a member_name argument
// and return it as a vm_result so that it can be reloaded in the list of invokestatic parameters.
// The member_name argument is a saved reference (in local#0) to the member_name.
// For backward compatibility with some JDK versions (7, 8) it can also be a direct method handle.
// FIXME: remove DMH case after j.l.i.InvokerBytecodeGenerator code shape is updated.
JRT_ENTRY(void, InterpreterRuntime::member_name_arg_or_null(JavaThread* current, address member_name,
                                                            Method* method, address bcp))
  Bytecodes::Code code = Bytecodes::code_at(method, bcp);
  if (code != Bytecodes::_invokestatic) {
    return;
  }
  ConstantPool* cpool = method->constants();
  int cp_index = Bytes::get_native_u2(bcp + 1);
  Symbol* cname = cpool->klass_name_at(cpool->klass_ref_index_at(cp_index, code));
  Symbol* mname = cpool->name_ref_at(cp_index, code);

  if (MethodHandles::has_member_arg(cname, mname)) {
    oop member_name_oop = cast_to_oop(member_name);
    if (java_lang_invoke_DirectMethodHandle::is_instance(member_name_oop)) {
      // FIXME: remove after j.l.i.InvokerBytecodeGenerator code shape is updated.
      member_name_oop = java_lang_invoke_DirectMethodHandle::member(member_name_oop);
    }
    current->set_vm_result(member_name_oop);
  } else {
    current->set_vm_result(nullptr);
  }
JRT_END
#endif // INCLUDE_JVMTI

#ifndef PRODUCT
// This must be a JRT_LEAF function because the interpreter must save registers on x86 to
// call this, which changes rsp and makes the interpreter's expression stack not walkable.
// The generated code still uses call_VM because that will set up the frame pointer for
// bcp and method.
JRT_LEAF(intptr_t, InterpreterRuntime::trace_bytecode(JavaThread* current, intptr_t preserve_this_value, intptr_t tos, intptr_t tos2))
  assert(current == JavaThread::current(), "pre-condition");
  LastFrameAccessor last_frame(current);
  assert(last_frame.is_interpreted_frame(), "must be an interpreted frame");
  methodHandle mh(current, last_frame.method());
  BytecodeTracer::trace_interpreter(mh, last_frame.bcp(), tos, tos2);
  return preserve_this_value;
JRT_END
#endif // !PRODUCT