/*
 * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "compiler/compilationPolicy.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/disassembler.hpp"
#include "gc/shared/barrierSetNMethod.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "interpreter/bytecodeTracer.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "interpreter/linkResolver.hpp"
#include "interpreter/templateTable.hpp"
#include "jvm_io.h"
#include "logging/log.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/constantPool.hpp"
#include "oops/cpCache.inline.hpp"
#include "oops/flatArrayKlass.hpp"
#include "oops/flatArrayOop.inline.hpp"
#include "oops/inlineKlass.inline.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/klass.inline.hpp"
#include "oops/methodData.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "prims/nativeLookup.hpp"
#include "runtime/atomic.hpp"
#include "runtime/continuation.hpp"
#include "runtime/deoptimization.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/icache.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/java.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/jfieldIDWorkaround.hpp"
#include "runtime/osThread.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stackWatermarkSet.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/threadCritical.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"
#include "utilities/events.hpp"
#include "utilities/globalDefinitions.hpp"
#ifdef COMPILER2
#include "opto/runtime.hpp"
#endif

// Helper class to access current interpreter state
class LastFrameAccessor : public StackObj {
  frame _last_frame;
public:
  LastFrameAccessor(JavaThread* current) {
    assert(current == Thread::current(), "sanity");
    _last_frame = current->last_frame();
  }
  bool is_interpreted_frame() const { return _last_frame.is_interpreted_frame(); }
  Method* method() const { return _last_frame.interpreter_frame_method(); }
  address bcp() const { return _last_frame.interpreter_frame_bcp(); }
  int bci() const { return _last_frame.interpreter_frame_bci(); }
  address mdp() const { return _last_frame.interpreter_frame_mdp(); }

  void set_bcp(address bcp) { _last_frame.interpreter_frame_set_bcp(bcp); }
  void set_mdp(address dp) { _last_frame.interpreter_frame_set_mdp(dp); }

  // pass method to avoid calling unsafe bcp_to_method (partial fix 4926272)
  Bytecodes::Code code() const { return Bytecodes::code_at(method(), bcp()); }

  Bytecode bytecode() const { return Bytecode(method(), bcp()); }
  int get_index_u1(Bytecodes::Code bc) const { return bytecode().get_index_u1(bc); }
  int get_index_u2(Bytecodes::Code bc) const { return bytecode().get_index_u2(bc); }
  int get_index_u2_cpcache(Bytecodes::Code bc) const
                                             { return bytecode().get_index_u2_cpcache(bc); }
  int get_index_u4(Bytecodes::Code bc) const { return bytecode().get_index_u4(bc); }
  int number_of_dimensions() const { return bcp()[3]; }
  ConstantPoolCacheEntry* cache_entry_at(int i) const
                                             { return method()->constants()->cache()->entry_at(i); }
  ConstantPoolCacheEntry* cache_entry() const { return cache_entry_at(Bytes::get_native_u2(bcp() + 1)); }

  oop callee_receiver(Symbol* signature) {
    return _last_frame.interpreter_callee_receiver(signature);
  }
  BasicObjectLock* monitor_begin() const {
    return _last_frame.interpreter_frame_monitor_begin();
  }
  BasicObjectLock* monitor_end() const {
    return _last_frame.interpreter_frame_monitor_end();
  }
  BasicObjectLock* next_monitor(BasicObjectLock* current) const {
    return _last_frame.next_monitor_in_interpreter_frame(current);
  }

  frame& get_frame() { return _last_frame; }
};

//------------------------------------------------------------------------------------------------------------------------
// State accessors

void InterpreterRuntime::set_bcp_and_mdp(address bcp, JavaThread* current) {
  LastFrameAccessor last_frame(current);
  last_frame.set_bcp(bcp);
  if (ProfileInterpreter) {
    // ProfileTraps uses MDOs independently of ProfileInterpreter.
    // That is why we must check both ProfileInterpreter and mdo != nullptr.
    MethodData* mdo = last_frame.method()->method_data();
    if (mdo != nullptr) {
      NEEDS_CLEANUP;
      last_frame.set_mdp(mdo->bci_to_dp(last_frame.bci()));
    }
  }
}

//------------------------------------------------------------------------------------------------------------------------
// Constants


JRT_ENTRY(void, InterpreterRuntime::ldc(JavaThread* current, bool wide))
  // access constant pool
  LastFrameAccessor last_frame(current);
  ConstantPool* pool = last_frame.method()->constants();
  int index = wide ? last_frame.get_index_u2(Bytecodes::_ldc_w) : last_frame.get_index_u1(Bytecodes::_ldc);
  constantTag tag = pool->tag_at(index);

  assert(tag.is_unresolved_klass() || tag.is_klass(), "wrong ldc call");
  Klass* klass = pool->klass_at(index, CHECK);
  oop java_class = tag.is_Qdescriptor_klass()
      ? InlineKlass::cast(klass)->val_mirror()
      : klass->java_mirror();
  current->set_vm_result(java_class);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::resolve_ldc(JavaThread* current, Bytecodes::Code bytecode)) {
  assert(bytecode == Bytecodes::_ldc ||
         bytecode == Bytecodes::_ldc_w ||
         bytecode == Bytecodes::_ldc2_w ||
         bytecode == Bytecodes::_fast_aldc ||
         bytecode == Bytecodes::_fast_aldc_w, "wrong bc");
  ResourceMark rm(current);
  const bool is_fast_aldc = (bytecode == Bytecodes::_fast_aldc ||
                             bytecode == Bytecodes::_fast_aldc_w);
  LastFrameAccessor last_frame(current);
  methodHandle m(current, last_frame.method());
  Bytecode_loadconstant ldc(m, last_frame.bci());

  // Double-check the size.  (Condy can have any type.)
  BasicType type = ldc.result_type();
  switch (type2size[type]) {
  case 2: guarantee(bytecode == Bytecodes::_ldc2_w, ""); break;
  case 1: guarantee(bytecode != Bytecodes::_ldc2_w, ""); break;
  default: ShouldNotReachHere();
  }

  // Resolve the constant.  This does not do unboxing.
  // But it does replace Universe::the_null_sentinel by null.
  oop result = ldc.resolve_constant(CHECK);
  assert(result != nullptr || is_fast_aldc, "null result only valid for fast_aldc");

#ifdef ASSERT
  {
    // The bytecode wrappers aren't GC-safe so construct a new one
    Bytecode_loadconstant ldc2(m, last_frame.bci());
    int rindex = ldc2.cache_index();
    if (rindex < 0)
      rindex = m->constants()->cp_to_object_index(ldc2.pool_index());
    if (rindex >= 0) {
      oop coop = m->constants()->resolved_reference_at(rindex);
      oop roop = (result == nullptr ? Universe::the_null_sentinel() : result);
      assert(roop == coop, "expected result for assembly code");
    }
  }
#endif
  current->set_vm_result(result);
  if (!is_fast_aldc) {
    // Tell the interpreter how to unbox the primitive.
    guarantee(java_lang_boxing_object::is_instance(result, type), "");
    int offset = java_lang_boxing_object::value_offset(type);
    intptr_t flags = ((as_TosState(type) << ConstantPoolCacheEntry::tos_state_shift)
                      | (offset & ConstantPoolCacheEntry::field_index_mask));
    current->set_vm_result_2((Metadata*)flags);
  }
}
JRT_END


//------------------------------------------------------------------------------------------------------------------------
// Allocation

JRT_ENTRY(void, InterpreterRuntime::_new(JavaThread* current, ConstantPool* pool, int index))
  Klass* k = pool->klass_at(index, CHECK);
  InstanceKlass* klass = InstanceKlass::cast(k);

  if (klass->is_inline_klass()) {
    THROW(vmSymbols::java_lang_InstantiationError());
  }

  // Make sure we are not instantiating an abstract klass
  klass->check_valid_for_instantiation(true, CHECK);

  // Make sure klass is initialized
  klass->initialize(CHECK);

  // At this point the class may not be fully initialized
  // because of recursive initialization. If it is fully
  // initialized & has_finalized is not set, we rewrite
  // it into its fast version (Note: no locking is needed
  // here since this is an atomic byte write and can be
  // done more than once).
  //
  // Note: In case of classes with has_finalized we don't
  //       rewrite since that saves us an extra check in
  //       the fast version which then would call the
  //       slow version anyway (and do a call back into
  //       Java).
  //       If we have a breakpoint, then we don't rewrite
  //       because the _breakpoint bytecode would be lost.
  oop obj = klass->allocate_instance(CHECK);
  current->set_vm_result(obj);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::aconst_init(JavaThread* current, ConstantPool* pool, int index))
  // Getting the InlineKlass
  Klass* k = pool->klass_at(index, CHECK);
  if (!k->is_inline_klass()) {
    // inconsistency with 'new' which throws an InstantiationError
    // in the future, aconst_init will just return null instead of throwing an exception
    THROW(vmSymbols::java_lang_IncompatibleClassChangeError());
  }
  assert(k->is_inline_klass(), "aconst_init argument must be the inline type class");
  InlineKlass* vklass = InlineKlass::cast(k);

  vklass->initialize(CHECK);
  oop res = vklass->default_value();
  current->set_vm_result(res);
JRT_END

JRT_ENTRY(int, InterpreterRuntime::withfield(JavaThread* current, ConstantPoolCacheEntry* cpe, uintptr_t ptr))
  oop obj = nullptr;
  int recv_offset = type2size[as_BasicType(cpe->flag_state())];
  assert(frame::interpreter_frame_expression_stack_direction() == -1, "currently is -1 on all platforms");
  int ret_adj = (recv_offset + type2size[T_OBJECT]) * AbstractInterpreter::stackElementSize;
  int offset = cpe->f2_as_offset();
  obj = (oopDesc*)(((uintptr_t*)ptr)[recv_offset * Interpreter::stackElementWords]);
  if (obj == nullptr) {
    THROW_(vmSymbols::java_lang_NullPointerException(), ret_adj);
  }
  assert(oopDesc::is_oop(obj), "Verifying receiver");
  assert(obj->klass()->is_inline_klass(), "Must have been checked during resolution");
  instanceHandle old_value_h(THREAD, (instanceOop)obj);
  oop ref = nullptr;
  if (cpe->flag_state() == atos) {
    ref = *(oopDesc**)ptr;
  }
  Handle ref_h(THREAD, ref);
  InlineKlass* ik = InlineKlass::cast(old_value_h()->klass());
  // Ensure that the class is initialized or being initialized
  // If the class is in error state, the creation of a new value should not be allowed
  ik->initialize(CHECK_(ret_adj));

  bool can_skip = false;
  switch(cpe->flag_state()) {
    case ztos:
      if (old_value_h()->bool_field(offset) == (jboolean)(*(jint*)ptr)) can_skip = true;
      break;
    case btos:
      if (old_value_h()->byte_field(offset) == (jbyte)(*(jint*)ptr)) can_skip = true;
      break;
    case ctos:
      if (old_value_h()->char_field(offset) == (jchar)(*(jint*)ptr)) can_skip = true;
      break;
    case stos:
      if (old_value_h()->short_field(offset) == (jshort)(*(jint*)ptr)) can_skip = true;
      break;
    case itos:
      if (old_value_h()->int_field(offset) == *(jint*)ptr) can_skip = true;
      break;
    case ltos:
      if (old_value_h()->long_field(offset) == *(jlong*)ptr) can_skip = true;
      break;
    case ftos:
      if (memcmp(old_value_h()->field_addr<jfloat>(offset), (jfloat*)ptr, sizeof(jfloat)) == 0) can_skip = true;
      break;
    case dtos:
      if (memcmp(old_value_h()->field_addr<jdouble>(offset), (jdouble*)ptr, sizeof(jdouble)) == 0) can_skip = true;
      break;
    case atos:
      if (!cpe->is_inlined() && old_value_h()->obj_field(offset) == ref_h()) can_skip = true;
      break;
    default:
      break;
  }
  if (can_skip) {
    current->set_vm_result(old_value_h());
    return ret_adj;
  }

  instanceOop new_value = ik->allocate_instance_buffer(CHECK_(ret_adj));
  Handle new_value_h = Handle(THREAD, new_value);
  ik->inline_copy_oop_to_new_oop(old_value_h(), new_value_h());
  switch(cpe->flag_state()) {
    case ztos:
      new_value_h()->bool_field_put(offset, (jboolean)(*(jint*)ptr));
      break;
    case btos:
      new_value_h()->byte_field_put(offset, (jbyte)(*(jint*)ptr));
      break;
    case ctos:
      new_value_h()->char_field_put(offset, (jchar)(*(jint*)ptr));
      break;
    case stos:
      new_value_h()->short_field_put(offset, (jshort)(*(jint*)ptr));
      break;
    case itos:
      new_value_h()->int_field_put(offset, (*(jint*)ptr));
      break;
    case ltos:
      new_value_h()->long_field_put(offset, *(jlong*)ptr);
      break;
    case ftos:
      new_value_h()->float_field_put(offset, *(jfloat*)ptr);
      break;
    case dtos:
      new_value_h()->double_field_put(offset, *(jdouble*)ptr);
      break;
    case atos:
      {
        if (cpe->is_null_free_inline_type()) {
          if (!cpe->is_inlined()) {
            if (ref_h() == nullptr) {
              THROW_(vmSymbols::java_lang_NullPointerException(), ret_adj);
            }
            new_value_h()->obj_field_put(offset, ref_h());
          } else {
            int field_index = cpe->field_index();
            InlineKlass* field_ik = InlineKlass::cast(ik->get_inline_type_field_klass(field_index));
            field_ik->write_inlined_field(new_value_h(), offset, ref_h(), CHECK_(ret_adj));
          }
        } else {
          new_value_h()->obj_field_put(offset, ref_h());
        }
      }
      break;
    default:
      ShouldNotReachHere();
  }
  current->set_vm_result(new_value_h());
  return ret_adj;
JRT_END

JRT_ENTRY(void, InterpreterRuntime::uninitialized_static_inline_type_field(JavaThread* current, oopDesc* mirror, int index))
  // The interpreter tries to access an inline static field that has not been initialized.
  // This situation can happen in different scenarios:
  // 1 - if the load or initialization of the field failed during step 8 of
  //     the initialization of the holder of the field, in this case the access to the field
  //     must fail
  // 2 - it can also happen when the initialization of the holder class triggered the initialization of
  //     another class which accesses this field in its static initializer, in this case the
  //     access must succeed to allow circularity
  // The code below tries to load and initialize the field's class again before returning the default value.
  // If the field was not initialized because of an error, an exception should be thrown.
  // If the class is being initialized, the default value is returned.
  instanceHandle mirror_h(THREAD, (instanceOop)mirror);
  InstanceKlass* klass = InstanceKlass::cast(java_lang_Class::as_Klass(mirror));
  assert(klass->field_signature(index)->is_Q_signature(), "Sanity check");
  if (klass->is_being_initialized() && klass->is_init_thread(THREAD)) {
    int offset = klass->field_offset(index);
    Klass* field_k = klass->get_inline_type_field_klass_or_null(index);
    if (field_k == nullptr) {
      field_k = SystemDictionary::resolve_or_fail(klass->field_signature(index)->fundamental_name(THREAD),
                                                  Handle(THREAD, klass->class_loader()),
                                                  Handle(THREAD, klass->protection_domain()),
                                                  true, CHECK);
      assert(field_k != nullptr, "Should have been loaded or an exception thrown above");
      klass->set_inline_type_field_klass(index, field_k);
    }
    field_k->initialize(CHECK);
    oop defaultvalue = InlineKlass::cast(field_k)->default_value();
    // It is safe to initialize the static field because 1) the current thread is the initializing thread
    // and is the only one that can access it, and 2) the field is actually not initialized (i.e. null)
    // otherwise the JVM should not be executing this code.
    mirror_h()->obj_field_put(offset, defaultvalue);
    current->set_vm_result(defaultvalue);
  } else {
    assert(klass->is_in_error_state(), "If not initializing, initialization must have failed to get there");
    ResourceMark rm(THREAD);
    const char* desc = "Could not initialize class ";
    const char* className = klass->external_name();
    size_t msglen = strlen(desc) + strlen(className) + 1;
    char* message = NEW_RESOURCE_ARRAY(char, msglen);
    if (nullptr == message) {
      // Out of memory: can't create detailed error message
      THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
    } else {
      jio_snprintf(message, msglen, "%s%s", desc, className);
      THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
    }
  }
JRT_END

JRT_ENTRY(void, InterpreterRuntime::read_inlined_field(JavaThread* current, oopDesc* obj, int index, Klass* field_holder))
  Handle obj_h(THREAD, obj);

  assert(oopDesc::is_oop(obj), "Sanity check");

  assert(field_holder->is_instance_klass(), "Sanity check");
  InstanceKlass* klass = InstanceKlass::cast(field_holder);

  assert(klass->field_is_inlined(index), "Sanity check");

  InlineKlass* field_vklass = InlineKlass::cast(klass->get_inline_type_field_klass(index));

  oop res = field_vklass->read_inlined_field(obj_h(), klass->field_offset(index), CHECK);
  current->set_vm_result(res);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::newarray(JavaThread* current, BasicType type, jint size))
  oop obj = oopFactory::new_typeArray(type, size, CHECK);
  current->set_vm_result(obj);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::anewarray(JavaThread* current, ConstantPool* pool, int index, jint size))
  Klass* klass = pool->klass_at(index, CHECK);
  bool is_qtype_desc = pool->tag_at(index).is_Qdescriptor_klass();
  arrayOop obj;
  if ((!klass->is_array_klass()) && is_qtype_desc) { // Logically creates elements, ensure klass init
    klass->initialize(CHECK);
    obj = oopFactory::new_valueArray(klass, size, CHECK);
  } else {
    obj = oopFactory::new_objArray(klass, size, CHECK);
  }
  current->set_vm_result(obj);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::value_array_load(JavaThread* current, arrayOopDesc* array, int index))
  flatArrayHandle vah(current, (flatArrayOop)array);
  oop value_holder = flatArrayOopDesc::value_alloc_copy_from_index(vah, index, CHECK);
  current->set_vm_result(value_holder);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::value_array_store(JavaThread* current, void* val, arrayOopDesc* array, int index))
  assert(val != nullptr, "can't store null into flat array");
  ((flatArrayOop)array)->value_copy_to_index(cast_to_oop(val), index);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::multianewarray(JavaThread* current, jint* first_size_address))
  // We may want to pass in more arguments - could make this slightly faster
  LastFrameAccessor last_frame(current);
  ConstantPool* constants = last_frame.method()->constants();
  int i = last_frame.get_index_u2(Bytecodes::_multianewarray);
  Klass* klass = constants->klass_at(i, CHECK);
  bool is_qtype = klass->name()->is_Q_array_signature();
  int nof_dims = last_frame.number_of_dimensions();
  assert(klass->is_klass(), "not a class");
  assert(nof_dims >= 1, "multianewarray rank must be nonzero");

  if (is_qtype) { // Logically creates elements, ensure klass init
    klass->initialize(CHECK);
  }

  // We must create an array of jints to pass to multi_allocate.
  ResourceMark rm(current);
  const int small_dims = 10;
  jint dim_array[small_dims];
  jint* dims = &dim_array[0];
  if (nof_dims > small_dims) {
    dims = (jint*) NEW_RESOURCE_ARRAY(jint, nof_dims);
  }
  for (int index = 0; index < nof_dims; index++) {
    // offset from first_size_address is addressed as local[index]
    int n = Interpreter::local_offset_in_bytes(index)/jintSize;
    dims[index] = first_size_address[n];
  }
  oop obj = ArrayKlass::cast(klass)->multi_allocate(nof_dims, dims, CHECK);
  current->set_vm_result(obj);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::register_finalizer(JavaThread* current, oopDesc* obj))
  assert(oopDesc::is_oop(obj), "must be a valid oop");
  assert(obj->klass()->has_finalizer(), "shouldn't be here otherwise");
  InstanceKlass::register_finalizer(instanceOop(obj), CHECK);
JRT_END

JRT_ENTRY(jboolean, InterpreterRuntime::is_substitutable(JavaThread* current, oopDesc* aobj, oopDesc* bobj))
  assert(oopDesc::is_oop(aobj) && oopDesc::is_oop(bobj), "must be valid oops");

  Handle ha(THREAD, aobj);
  Handle hb(THREAD, bobj);
  JavaValue result(T_BOOLEAN);
  JavaCallArguments args;
  args.push_oop(ha);
  args.push_oop(hb);
  methodHandle method(current, Universe::is_substitutable_method());
  JavaCalls::call(&result, method, &args, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    // Something really bad happened because isSubstitutable() should not throw exceptions
    // If it is an error, just let it propagate
    // If it is an exception, wrap it into an InternalError
    if (!PENDING_EXCEPTION->is_a(vmClasses::Error_klass())) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      THROW_MSG_CAUSE_(vmSymbols::java_lang_InternalError(), "Internal error in substitutability test", e, false);
    }
  }
  return result.get_jboolean();
JRT_END

// Quicken instance-of and check-cast bytecodes
JRT_ENTRY(void, InterpreterRuntime::quicken_io_cc(JavaThread* current))
  // Force resolving; quicken the bytecode
  LastFrameAccessor last_frame(current);
  int which = last_frame.get_index_u2(Bytecodes::_checkcast);
  ConstantPool* cpool = last_frame.method()->constants();
  // We'd expect to assert that we're only here to quicken bytecodes, but in a multithreaded
  // program we might have seen an unquick'd bytecode in the interpreter but have another
  // thread quicken the bytecode before we get here.
  // assert( cpool->tag_at(which).is_unresolved_klass(), "should only come here to quicken bytecodes" );
  Klass* klass = cpool->klass_at(which, CHECK);
  current->set_vm_result_2(klass);
JRT_END


//------------------------------------------------------------------------------------------------------------------------
// Exceptions

void InterpreterRuntime::note_trap_inner(JavaThread* current, int reason,
                                         const methodHandle& trap_method, int trap_bci) {
  if (trap_method.not_null()) {
    MethodData* trap_mdo = trap_method->method_data();
    if (trap_mdo == nullptr) {
      ExceptionMark em(current);
      JavaThread* THREAD = current; // For exception macros.
      Method::build_profiling_method_data(trap_method, THREAD);
      if (HAS_PENDING_EXCEPTION) {
        // Only metaspace OOM is expected. No Java code executed.
        assert((PENDING_EXCEPTION->is_a(vmClasses::OutOfMemoryError_klass())),
               "we expect only an OOM error here");
        CLEAR_PENDING_EXCEPTION;
      }
      trap_mdo = trap_method->method_data();
      // and fall through...
    }
    if (trap_mdo != nullptr) {
      // Update per-method count of trap events. The interpreter
      // is updating the MDO to simulate the effect of compiler traps.
      Deoptimization::update_method_data_from_interpreter(trap_mdo, trap_bci, reason);
    }
  }
}

// Assume the compiler is (or will be) interested in this event.
// If necessary, create an MDO to hold the information, and record it.
void InterpreterRuntime::note_trap(JavaThread* current, int reason) {
  assert(ProfileTraps, "call me only if profiling");
  LastFrameAccessor last_frame(current);
  methodHandle trap_method(current, last_frame.method());
  int trap_bci = trap_method->bci_from(last_frame.bcp());
  note_trap_inner(current, reason, trap_method, trap_bci);
}

static Handle get_preinitialized_exception(Klass* k, TRAPS) {
  // get klass
  InstanceKlass* klass = InstanceKlass::cast(k);
  assert(klass->is_initialized(),
         "this klass should have been initialized during VM initialization");
  // create instance - do not call constructor since we may have no
  // (java) stack space left (should assert constructor is empty)
  Handle exception;
  oop exception_oop = klass->allocate_instance(CHECK_(exception));
  exception = Handle(THREAD, exception_oop);
  if (StackTraceInThrowable) {
    java_lang_Throwable::fill_in_stack_trace(exception);
  }
  return exception;
}

// Special handling for stack overflow: since we don't have any (java) stack
// space left we use the pre-allocated & pre-initialized StackOverflowError
// klass to create a stack overflow error instance. We do not call its
// constructor for the same reason (it is empty, anyway).
JRT_ENTRY(void, InterpreterRuntime::throw_StackOverflowError(JavaThread* current))
  Handle exception = get_preinitialized_exception(
                                 vmClasses::StackOverflowError_klass(),
                                 CHECK);
  // Increment counter for hs_err file reporting
  Atomic::inc(&Exceptions::_stack_overflow_errors);
  // Remove the ScopedValue bindings in case we got a StackOverflowError
  // while we were trying to manipulate ScopedValue bindings.
  current->clear_scopedValueBindings();
  THROW_HANDLE(exception);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_delayed_StackOverflowError(JavaThread* current))
  Handle exception = get_preinitialized_exception(
                                 vmClasses::StackOverflowError_klass(),
                                 CHECK);
  java_lang_Throwable::set_message(exception(),
                                   Universe::delayed_stack_overflow_error_message());
  // Increment counter for hs_err file reporting
  Atomic::inc(&Exceptions::_stack_overflow_errors);
  // Remove the ScopedValue bindings in case we got a StackOverflowError
  // while we were trying to manipulate ScopedValue bindings.
  current->clear_scopedValueBindings();
  THROW_HANDLE(exception);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::create_exception(JavaThread* current, char* name, char* message))
  // lookup exception klass
  TempNewSymbol s = SymbolTable::new_symbol(name);
  if (ProfileTraps) {
    if (s == vmSymbols::java_lang_ArithmeticException()) {
      note_trap(current, Deoptimization::Reason_div0_check);
    } else if (s == vmSymbols::java_lang_NullPointerException()) {
      note_trap(current, Deoptimization::Reason_null_check);
    }
  }
  // create exception
  Handle exception = Exceptions::new_exception(current, s, message);
  current->set_vm_result(exception());
JRT_END


JRT_ENTRY(void, InterpreterRuntime::create_klass_exception(JavaThread* current, char* name, oopDesc* obj))
  // Produce the error message first because note_trap can safepoint
  ResourceMark rm(current);
  const char* klass_name = obj->klass()->external_name();
  // lookup exception klass
  TempNewSymbol s = SymbolTable::new_symbol(name);
  if (ProfileTraps) {
    if (s == vmSymbols::java_lang_ArrayStoreException()) {
      note_trap(current, Deoptimization::Reason_array_check);
    } else {
      note_trap(current, Deoptimization::Reason_class_check);
    }
  }
  // create exception, with klass name as detail message
  Handle exception = Exceptions::new_exception(current, s, klass_name);
  current->set_vm_result(exception());
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_ArrayIndexOutOfBoundsException(JavaThread* current, arrayOopDesc* a, jint index))
  // Produce the error message first because note_trap can safepoint
  ResourceMark rm(current);
  stringStream ss;
  ss.print("Index %d out of bounds for length %d", index, a->length());

  if (ProfileTraps) {
    note_trap(current, Deoptimization::Reason_range_check);
  }

  THROW_MSG(vmSymbols::java_lang_ArrayIndexOutOfBoundsException(), ss.as_string());
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_ClassCastException(
  JavaThread* current, oopDesc* obj))

  // Produce the error message first because note_trap can safepoint
  ResourceMark rm(current);
  char* message = SharedRuntime::generate_class_cast_message(
    current, obj->klass());

  if (ProfileTraps) {
    note_trap(current, Deoptimization::Reason_class_check);
  }

  // create exception
  THROW_MSG(vmSymbols::java_lang_ClassCastException(), message);
JRT_END

// exception_handler_for_exception(...) returns the continuation address,
// the exception oop (via TLS) and sets the bci/bcp for the continuation.
// The exception oop is returned to make sure it is preserved over GC (it
// is only on the stack if the exception was thrown explicitly via athrow).
// During this operation, the expression stack contains the values for the
// bci where the exception happened. If the exception was propagated back
// from a call, the expression stack contains the values for the bci at the
// invoke w/o arguments (i.e., as if one were inside the call).
JRT_ENTRY(address, InterpreterRuntime::exception_handler_for_exception(JavaThread* current, oopDesc* exception))
  // We get here after we have unwound from a callee throwing an exception
  // into the interpreter. Any deferred stack processing is notified of
  // the event via the StackWatermarkSet.
  StackWatermarkSet::after_unwind(current);

  LastFrameAccessor last_frame(current);
  Handle h_exception(current, exception);
  methodHandle h_method(current, last_frame.method());
  constantPoolHandle h_constants(current, h_method->constants());
  bool should_repeat;
  int handler_bci;
  int current_bci = last_frame.bci();

  if (current->frames_to_pop_failed_realloc() > 0) {
    // Allocation of scalar replaced object used in this frame
    // failed. Unconditionally pop the frame.
    current->dec_frames_to_pop_failed_realloc();
    current->set_vm_result(h_exception());
    // If the method is synchronized we already unlocked the monitor
    // during deoptimization so the interpreter needs to skip it when
    // the frame is popped.
    current->set_do_not_unlock_if_synchronized(true);
    return Interpreter::remove_activation_entry();
  }

  // Need to do this check first since when _do_not_unlock_if_synchronized
  // is set, we don't want to trigger any classloading which may make calls
  // into java, or surprisingly find a matching exception handler for bci 0
  // since at this moment the method hasn't been "officially" entered yet.
  if (current->do_not_unlock_if_synchronized()) {
    ResourceMark rm;
    assert(current_bci == 0, "bci isn't zero for do_not_unlock_if_synchronized");
    current->set_vm_result(exception);
    return Interpreter::remove_activation_entry();
  }

  do {
    should_repeat = false;

    // assertions
    assert(h_exception.not_null(), "null exceptions should be handled by athrow");
    // Check that exception is a subclass of Throwable.
    assert(h_exception->is_a(vmClasses::Throwable_klass()),
           "Exception not subclass of Throwable");

    // tracing
    if (log_is_enabled(Info, exceptions)) {
      ResourceMark rm(current);
      stringStream tempst;
      tempst.print("interpreter method <%s>\n"
                   " at bci %d for thread " INTPTR_FORMAT " (%s)",
                   h_method->print_value_string(), current_bci, p2i(current), current->name());
      Exceptions::log_exception(h_exception, tempst.as_string());
    }
    // Don't go paging in something which won't be used.
    // else if (extable->length() == 0) {
    //   // disabled for now - interpreter is not using shortcut yet
    //   // (shortcut is not to call runtime if we have no exception handlers)
    //   // warning("performance bug: should not call runtime if method has no exception handlers");
    // }
    // for AbortVMOnException flag
    Exceptions::debug_check_abort(h_exception);

    // exception handler lookup
    Klass* klass = h_exception->klass();
    handler_bci = Method::fast_exception_handler_bci_for(h_method, klass, current_bci, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      // We threw an exception while trying to find the exception handler.
      // Transfer the new exception to the exception handle which will
      // be set into thread local storage, and do another lookup for an
      // exception handler for this exception, this time starting at the
      // BCI of the exception handler which caused the exception to be
      // thrown (bug 4307310).
      h_exception = Handle(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      if (handler_bci >= 0) {
        current_bci = handler_bci;
        should_repeat = true;
      }
    }
  } while (should_repeat == true);

#if INCLUDE_JVMCI
  if (EnableJVMCI && h_method->method_data() != nullptr) {
    ResourceMark rm(current);
    ProfileData* pdata = h_method->method_data()->allocate_bci_to_data(current_bci, nullptr);
    if (pdata != nullptr && pdata->is_BitData()) {
      BitData* bit_data = (BitData*) pdata;
      bit_data->set_exception_seen();
    }
  }
#endif

  // notify JVMTI of an exception throw; JVMTI will detect if this is a first
  // time throw or a stack unwinding throw and accordingly notify the debugger
  if (JvmtiExport::can_post_on_exceptions()) {
    JvmtiExport::post_exception_throw(current, h_method(), last_frame.bcp(), h_exception());
  }

  address continuation = nullptr;
  address handler_pc = nullptr;
  if (handler_bci < 0 || !current->stack_overflow_state()->reguard_stack((address) &continuation)) {
    // Forward exception to callee (leaving bci/bcp untouched) because (a) no
    // handler in this method, or (b) after a stack overflow there is not yet
    // enough stack space available to reprotect the stack.
    continuation = Interpreter::remove_activation_entry();
#if COMPILER2_OR_JVMCI
    // Count this for compilation purposes
    h_method->interpreter_throwout_increment(THREAD);
#endif
  } else {
    // handler in this method => change bci/bcp to handler bci/bcp and continue there
    handler_pc = h_method->code_base() + handler_bci;
#ifndef ZERO
    set_bcp_and_mdp(handler_pc, current);
    continuation = Interpreter::dispatch_table(vtos)[*handler_pc];
#else
    continuation = (address)(intptr_t) handler_bci;
#endif
  }

  // notify debugger of an exception catch
  // (this is good for exceptions caught in native methods as well)
  if (JvmtiExport::can_post_on_exceptions()) {
    JvmtiExport::notice_unwind_due_to_exception(current, h_method(), handler_pc, h_exception(), (handler_pc != nullptr));
  }

  current->set_vm_result(h_exception());
  return continuation;
JRT_END


JRT_ENTRY(void, InterpreterRuntime::throw_pending_exception(JavaThread* current))
  assert(current->has_pending_exception(), "must only be called if there's an exception pending");
  // nothing to do - eventually we should remove this code entirely (see comments @ call sites)
JRT_END


JRT_ENTRY(void, InterpreterRuntime::throw_AbstractMethodError(JavaThread* current))
  THROW(vmSymbols::java_lang_AbstractMethodError());
JRT_END

// This method is called from the "abstract_entry" of the interpreter.
// At that point, the arguments have already been removed from the stack
// and therefore we don't have the receiver object at our fingertips. (Though,
// on some platforms the receiver still resides in a register...). Thus,
// we have no choice but to print an error message not containing the
// receiver type.
JRT_ENTRY(void, InterpreterRuntime::throw_AbstractMethodErrorWithMethod(JavaThread* current,
                                                                        Method* missingMethod))
  ResourceMark rm(current);
  assert(missingMethod != nullptr, "sanity");
  methodHandle m(current, missingMethod);
  LinkResolver::throw_abstract_method_error(m, THREAD);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_AbstractMethodErrorVerbose(JavaThread* current,
                                                                     Klass* recvKlass,
                                                                     Method* missingMethod))
  ResourceMark rm(current);
  methodHandle mh = methodHandle(current, missingMethod);
  LinkResolver::throw_abstract_method_error(mh, recvKlass, THREAD);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_InstantiationError(JavaThread* current))
  THROW(vmSymbols::java_lang_InstantiationError());
JRT_END


JRT_ENTRY(void, InterpreterRuntime::throw_IncompatibleClassChangeError(JavaThread* current))
  THROW(vmSymbols::java_lang_IncompatibleClassChangeError());
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_IncompatibleClassChangeErrorVerbose(JavaThread* current,
                                                                              Klass* recvKlass,
                                                                              Klass* interfaceKlass))
  ResourceMark rm(current);
  char buf[1000];
  buf[0] = '\0';
  jio_snprintf(buf, sizeof(buf),
               "Class %s does not implement the requested interface %s",
               recvKlass ? recvKlass->external_name() : "nullptr",
               interfaceKlass ? interfaceKlass->external_name() : "nullptr");
  THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(), buf);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::throw_NullPointerException(JavaThread* current))
  THROW(vmSymbols::java_lang_NullPointerException());
JRT_END

//------------------------------------------------------------------------------------------------------------------------
// Fields
//

void InterpreterRuntime::resolve_get_put(JavaThread* current, Bytecodes::Code bytecode) {
  // resolve field
  fieldDescriptor info;
  LastFrameAccessor last_frame(current);
  constantPoolHandle pool(current, last_frame.method()->constants());
  methodHandle m(current, last_frame.method());
  bool is_put    = (bytecode == Bytecodes::_putfield || bytecode == Bytecodes::_nofast_putfield ||
                    bytecode == Bytecodes::_putstatic || bytecode == Bytecodes::_withfield);
  bool is_static = (bytecode == Bytecodes::_getstatic || bytecode == Bytecodes::_putstatic);
  bool is_inline_type = bytecode == Bytecodes::_withfield;

  {
    JvmtiHideSingleStepping jhss(current);
    JavaThread* THREAD = current; // For exception macros.
    LinkResolver::resolve_field_access(info, pool, last_frame.get_index_u2_cpcache(bytecode),
                                       m, bytecode, CHECK);
  } // end JvmtiHideSingleStepping

  // check if link resolution caused cpCache to be updated
  ConstantPoolCacheEntry* cp_cache_entry = last_frame.cache_entry();
  if (cp_cache_entry->is_resolved(bytecode)) return;

  // compute auxiliary field attributes
  TosState state = as_TosState(info.field_type());

  // Resolution of put instructions on final fields is delayed. That is required so that
  // exceptions are thrown at the correct place (when the instruction is actually invoked).
  // If we do not resolve an instruction in the current pass, leaving the put_code
  // set to zero will cause the next put instruction to the same field to reresolve.
  // Resolution of put instructions to final instance fields with invalid updates (i.e.,
  // to final instance fields with updates originating from a method different than <init>)
  // is inhibited. A putfield instruction targeting an instance final field must throw
  // an IllegalAccessError if the instruction is not in an instance
  // initializer method <init>. If resolution were not inhibited, a putfield
  // in an initializer method could be resolved in the initializer. Subsequent
  // putfield instructions to the same field would then use cached information.
  // As a result, those instructions would not pass through the VM. That is,
  // checks in resolve_field_access() would not be executed for those instructions
  // and the required IllegalAccessError would not be thrown.
  //
  // Also, we need to delay resolving getstatic and putstatic instructions until the
  // class is initialized. This is required so that access to the static
  // field will call the initialization function every time until the class
  // is completely initialized, a la section 2.17.5 of the JVM Specification.
  InstanceKlass* klass = info.field_holder();
  bool uninitialized_static = is_static && !klass->is_initialized();
  bool has_initialized_final_update = info.field_holder()->major_version() >= 53 &&
                                      info.has_initialized_final_update();
  assert(!(has_initialized_final_update && !info.access_flags().is_final()), "Fields with initialized final updates must be final");

  Bytecodes::Code get_code = (Bytecodes::Code)0;
  Bytecodes::Code put_code = (Bytecodes::Code)0;
  if (!uninitialized_static) {
    if (is_static) {
      get_code = Bytecodes::_getstatic;
    } else {
      get_code = Bytecodes::_getfield;
    }
    if (is_put && is_inline_type) {
      put_code = ((is_static) ? Bytecodes::_putstatic : Bytecodes::_withfield);
    } else if ((is_put && !has_initialized_final_update) || !info.access_flags().is_final()) {
      put_code = ((is_static) ? Bytecodes::_putstatic : Bytecodes::_putfield);
    }
  }

  cp_cache_entry->set_field(
    get_code,
    put_code,
    info.field_holder(),
    info.index(),
    info.offset(),
    state,
    info.access_flags().is_final(),
    info.access_flags().is_volatile(),
    info.is_inlined(),
    info.signature()->is_Q_signature() && info.is_inline_type()
  );
}


//------------------------------------------------------------------------------------------------------------------------
// Synchronization
//
// The interpreter's synchronization code is factored out so that it can
// be shared by method invocation and synchronized blocks.
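// Note: monitorenter below is a JRT_ENTRY_NO_ASYNC, so it may block and safepoint while
// acquiring the monitor, whereas monitorexit is a JRT_LEAF and must neither safepoint nor throw.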
//%note synchronization_3

//%note monitor_1
JRT_ENTRY_NO_ASYNC(void, InterpreterRuntime::monitorenter(JavaThread* current, BasicObjectLock* elem))
#ifdef ASSERT
  current->last_frame().interpreter_frame_verify_monitor(elem);
#endif
  Handle h_obj(current, elem->obj());
  assert(Universe::heap()->is_in_or_null(h_obj()),
         "must be null or an object");
  ObjectSynchronizer::enter(h_obj, elem->lock(), current);
  assert(Universe::heap()->is_in_or_null(elem->obj()),
         "must be null or an object");
#ifdef ASSERT
  current->last_frame().interpreter_frame_verify_monitor(elem);
#endif
JRT_END


JRT_LEAF(void, InterpreterRuntime::monitorexit(BasicObjectLock* elem))
  oop obj = elem->obj();
  assert(Universe::heap()->is_in(obj), "must be an object");
  // The object could become unlocked through a JNI call, which we have no other checks for.
  // Give a fatal message if CheckJNICalls. Otherwise we ignore it.
  if (obj->is_unlocked()) {
    if (CheckJNICalls) {
      fatal("Object has been unlocked by JNI");
    }
    return;
  }
  ObjectSynchronizer::exit(obj, elem->lock(), JavaThread::current());
  // Free entry. If it is not cleared, the exception handling code will try to unlock the monitor
  // again at method exit or in the case of an exception.
  elem->set_obj(nullptr);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::throw_illegal_monitor_state_exception(JavaThread* current))
  THROW(vmSymbols::java_lang_IllegalMonitorStateException());
JRT_END


JRT_ENTRY(void, InterpreterRuntime::new_illegal_monitor_state_exception(JavaThread* current))
  // Returns an illegal exception to install into the current thread. The
  // pending_exception flag is cleared so normal exception handling does not
  // trigger. Any currently installed exception will be overwritten. This
  // method will be called during an exception unwind.
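  // The original exception (stashed in vm_result by the caller) is captured below and then
  // intentionally replaced by a freshly created IllegalMonitorStateException.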
  assert(!HAS_PENDING_EXCEPTION, "no pending exception");
  Handle exception(current, current->vm_result());
  assert(exception() != nullptr, "vm result should be set");
  current->set_vm_result(nullptr); // clear vm result before continuing (may cause memory leaks and assert failures)
  exception = get_preinitialized_exception(vmClasses::IllegalMonitorStateException_klass(), CATCH);
  current->set_vm_result(exception());
JRT_END


//------------------------------------------------------------------------------------------------------------------------
// Invokes

JRT_ENTRY(Bytecodes::Code, InterpreterRuntime::get_original_bytecode_at(JavaThread* current, Method* method, address bcp))
  return method->orig_bytecode_at(method->bci_from(bcp));
JRT_END

JRT_ENTRY(void, InterpreterRuntime::set_original_bytecode_at(JavaThread* current, Method* method, address bcp, Bytecodes::Code new_code))
  method->set_orig_bytecode_at(method->bci_from(bcp), new_code);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::_breakpoint(JavaThread* current, Method* method, address bcp))
  JvmtiExport::post_raw_breakpoint(current, method, bcp);
JRT_END

void InterpreterRuntime::resolve_invoke(JavaThread* current, Bytecodes::Code bytecode) {
  LastFrameAccessor last_frame(current);
  // extract receiver from the outgoing argument list if necessary
  Handle receiver(current, nullptr);
  if (bytecode == Bytecodes::_invokevirtual || bytecode == Bytecodes::_invokeinterface ||
      bytecode == Bytecodes::_invokespecial) {
    ResourceMark rm(current);
    methodHandle m(current, last_frame.method());
    Bytecode_invoke call(m, last_frame.bci());
    Symbol* signature = call.signature();
    receiver = Handle(current, last_frame.callee_receiver(signature));

    assert(Universe::heap()->is_in_or_null(receiver()),
           "sanity check");
    assert(receiver.is_null() ||
           !Universe::heap()->is_in(receiver->klass()),
           "sanity check");
  }

  // resolve method
  CallInfo info;
  constantPoolHandle pool(current, last_frame.method()->constants());

  methodHandle resolved_method;

  {
    JvmtiHideSingleStepping jhss(current);
    JavaThread* THREAD = current; // For exception macros.
    LinkResolver::resolve_invoke(info, receiver, pool,
                                 last_frame.get_index_u2_cpcache(bytecode), bytecode,
                                 THREAD);

    if (HAS_PENDING_EXCEPTION) {
      if (ProfileTraps && PENDING_EXCEPTION->klass()->name() == vmSymbols::java_lang_NullPointerException()) {
        // Preserve the original exception across the call to note_trap()
        PreserveExceptionMark pm(current);
        // Recording the trap will help the compiler to potentially recognize this exception as "hot"
        note_trap(current, Deoptimization::Reason_null_check);
      }
      return;
    }

    if (JvmtiExport::can_hotswap_or_post_breakpoint() && info.resolved_method()->is_old()) {
      resolved_method = methodHandle(current, info.resolved_method()->get_new_method());
    } else {
      resolved_method = methodHandle(current, info.resolved_method());
    }
  } // end JvmtiHideSingleStepping

  // check if link resolution caused cpCache to be updated
  ConstantPoolCacheEntry* cp_cache_entry = last_frame.cache_entry();
  if (cp_cache_entry->is_resolved(bytecode)) return;

#ifdef ASSERT
  if (bytecode == Bytecodes::_invokeinterface) {
    if (resolved_method->method_holder() == vmClasses::Object_klass()) {
      // NOTE: THIS IS A FIX FOR A CORNER CASE in the JVM spec
      // (see also CallInfo::set_interface for details)
      assert(info.call_kind() == CallInfo::vtable_call ||
             info.call_kind() == CallInfo::direct_call, "");
      assert(resolved_method->is_final() || info.has_vtable_index(),
             "should have been set already");
    } else if (!resolved_method->has_itable_index()) {
      // Resolved something like CharSequence.toString.  Use vtable not itable.
      assert(info.call_kind() != CallInfo::itable_call, "");
    } else {
      // Setup itable entry
      assert(info.call_kind() == CallInfo::itable_call, "");
      int index = resolved_method->itable_index();
      assert(info.itable_index() == index, "");
    }
  } else if (bytecode == Bytecodes::_invokespecial) {
    assert(info.call_kind() == CallInfo::direct_call, "must be direct call");
  } else {
    assert(info.call_kind() == CallInfo::direct_call ||
           info.call_kind() == CallInfo::vtable_call, "");
  }
#endif
  // Get sender and only set cpCache entry to resolved if it is not an
  // interface.  The receiver for invokespecial calls within interface
  // methods must be checked for every call.
  InstanceKlass* sender = pool->pool_holder();

  switch (info.call_kind()) {
  case CallInfo::direct_call:
    cp_cache_entry->set_direct_call(
      bytecode,
      resolved_method,
      sender->is_interface());
    break;
  case CallInfo::vtable_call:
    cp_cache_entry->set_vtable_call(
      bytecode,
      resolved_method,
      info.vtable_index());
    break;
  case CallInfo::itable_call:
    cp_cache_entry->set_itable_call(
      bytecode,
      info.resolved_klass(),
      resolved_method,
      info.itable_index());
    break;
  default: ShouldNotReachHere();
  }
}


// First time execution: Resolve symbols, create a permanent MethodType object.
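// Called from resolve_from_cache while the invokehandle's cpCache entry is still unresolved;
// once set_method_handle fills it in below, subsequent executions stay on the interpreter fast path.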
void InterpreterRuntime::resolve_invokehandle(JavaThread* current) {
  const Bytecodes::Code bytecode = Bytecodes::_invokehandle;
  LastFrameAccessor last_frame(current);

  // resolve method
  CallInfo info;
  constantPoolHandle pool(current, last_frame.method()->constants());
  {
    JvmtiHideSingleStepping jhss(current);
    JavaThread* THREAD = current; // For exception macros.
    LinkResolver::resolve_invoke(info, Handle(), pool,
                                 last_frame.get_index_u2_cpcache(bytecode), bytecode,
                                 CHECK);
  } // end JvmtiHideSingleStepping

  ConstantPoolCacheEntry* cp_cache_entry = last_frame.cache_entry();
  cp_cache_entry->set_method_handle(pool, info);
}

// First time execution: Resolve symbols, create a permanent CallSite object.
void InterpreterRuntime::resolve_invokedynamic(JavaThread* current) {
  LastFrameAccessor last_frame(current);
  const Bytecodes::Code bytecode = Bytecodes::_invokedynamic;

  // resolve method
  CallInfo info;
  constantPoolHandle pool(current, last_frame.method()->constants());
  int index = last_frame.get_index_u4(bytecode);
  {
    JvmtiHideSingleStepping jhss(current);
    JavaThread* THREAD = current; // For exception macros.
    LinkResolver::resolve_invoke(info, Handle(), pool,
                                 index, bytecode, CHECK);
  } // end JvmtiHideSingleStepping

  pool->cache()->set_dynamic_call(info, pool->decode_invokedynamic_index(index));
}

// This function is the interface to the assembly code. It returns the resolved
// cpCache entry.  This doesn't safepoint, but the helper routines safepoint.
// This function will check for redefinition!
JRT_ENTRY(void, InterpreterRuntime::resolve_from_cache(JavaThread* current, Bytecodes::Code bytecode)) {
  switch (bytecode) {
  case Bytecodes::_getstatic:
  case Bytecodes::_putstatic:
  case Bytecodes::_getfield:
  case Bytecodes::_putfield:
  case Bytecodes::_withfield:
    resolve_get_put(current, bytecode);
    break;
  case Bytecodes::_invokevirtual:
  case Bytecodes::_invokespecial:
  case Bytecodes::_invokestatic:
  case Bytecodes::_invokeinterface:
    resolve_invoke(current, bytecode);
    break;
  case Bytecodes::_invokehandle:
    resolve_invokehandle(current);
    break;
  case Bytecodes::_invokedynamic:
    resolve_invokedynamic(current);
    break;
  default:
    fatal("unexpected bytecode: %s", Bytecodes::name(bytecode));
    break;
  }
}
JRT_END

//------------------------------------------------------------------------------------------------------------------------
// Miscellaneous


nmethod* InterpreterRuntime::frequency_counter_overflow(JavaThread* current, address branch_bcp) {
  // Enable WXWrite: the function is called directly by interpreter.
  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXWrite, current));

  // frequency_counter_overflow_inner can throw async exception.
  nmethod* nm = frequency_counter_overflow_inner(current, branch_bcp);
  assert(branch_bcp != nullptr || nm == nullptr, "always returns null for non OSR requests");
  if (branch_bcp != nullptr && nm != nullptr) {
    // This was a successful request for an OSR nmethod.  Because
    // frequency_counter_overflow_inner ends with a safepoint check,
    // nm could have been unloaded so look it up again.  It's unsafe
    // to examine nm directly since it might have been freed and used
    // for something else.
    LastFrameAccessor last_frame(current);
    Method* method = last_frame.method();
    int bci = method->bci_from(last_frame.bcp());
    nm = method->lookup_osr_nmethod_for(bci, CompLevel_none, false);
    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
    if (nm != nullptr && bs_nm != nullptr) {
      // in case the transition passed a safepoint we need to barrier this again
      if (!bs_nm->nmethod_osr_entry_barrier(nm)) {
        nm = nullptr;
      }
    }
  }
  if (nm != nullptr && current->is_interp_only_mode()) {
    // Normally we never get an nm if is_interp_only_mode() is true, because
    // policy()->event has a check for this and won't compile the method when
    // true. However, it's possible for is_interp_only_mode() to become true
    // during the compilation. We don't want to return the nm in that case
    // because we want to continue to execute interpreted.
    nm = nullptr;
  }
#ifndef PRODUCT
  if (TraceOnStackReplacement) {
    if (nm != nullptr) {
      tty->print("OSR entry @ pc: " INTPTR_FORMAT ": ", p2i(nm->osr_entry()));
      nm->print();
    }
  }
#endif
  return nm;
}

JRT_ENTRY(nmethod*,
          InterpreterRuntime::frequency_counter_overflow_inner(JavaThread* current, address branch_bcp))
  // use UnlockFlagSaver to clear and restore the _do_not_unlock_if_synchronized
  // flag, in case this method triggers classloading which will call into Java.
  UnlockFlagSaver fs(current);

  LastFrameAccessor last_frame(current);
  assert(last_frame.is_interpreted_frame(), "must come from interpreter");
  methodHandle method(current, last_frame.method());
  const int branch_bci = branch_bcp != nullptr ? method->bci_from(branch_bcp) : InvocationEntryBci;
  const int bci = branch_bcp != nullptr ? method->bci_from(last_frame.bcp()) : InvocationEntryBci;
  nmethod* osr_nm = CompilationPolicy::event(method, method, branch_bci, bci, CompLevel_none, nullptr, CHECK_NULL);

  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (osr_nm != nullptr && bs_nm != nullptr) {
    if (!bs_nm->nmethod_osr_entry_barrier(osr_nm)) {
      osr_nm = nullptr;
    }
  }
  return osr_nm;
JRT_END

JRT_LEAF(jint, InterpreterRuntime::bcp_to_di(Method* method, address cur_bcp))
  assert(ProfileInterpreter, "must be profiling interpreter");
  int bci = method->bci_from(cur_bcp);
  MethodData* mdo = method->method_data();
  if (mdo == nullptr) return 0;
  return mdo->bci_to_di(bci);
JRT_END

#ifdef ASSERT
JRT_LEAF(void, InterpreterRuntime::verify_mdp(Method* method, address bcp, address mdp))
  assert(ProfileInterpreter, "must be profiling interpreter");

  MethodData* mdo = method->method_data();
  assert(mdo != nullptr, "must not be null");

  int bci = method->bci_from(bcp);

  address mdp2 = mdo->bci_to_dp(bci);
  if (mdp != mdp2) {
    ResourceMark rm;
    tty->print_cr("FAILED verify : actual mdp %p   expected mdp %p @ bci %d", mdp, mdp2, bci);
    int current_di = mdo->dp_to_di(mdp);
    int expected_di = mdo->dp_to_di(mdp2);
    tty->print_cr("  actual di %d   expected di %d", current_di, expected_di);
    int expected_approx_bci = mdo->data_at(expected_di)->bci();
    int approx_bci = -1;
    if (current_di >= 0) {
      approx_bci = mdo->data_at(current_di)->bci();
    }
    tty->print_cr("  actual bci is %d  expected bci %d", approx_bci, expected_approx_bci);
    mdo->print_on(tty);
    method->print_codes();
  }
  assert(mdp == mdp2, "wrong mdp");
JRT_END
#endif // ASSERT

JRT_ENTRY(void, InterpreterRuntime::update_mdp_for_ret(JavaThread* current, int return_bci))
  assert(ProfileInterpreter, "must be profiling interpreter");
  ResourceMark rm(current);
  LastFrameAccessor last_frame(current);
  assert(last_frame.is_interpreted_frame(), "must come from interpreter");
  MethodData* h_mdo = last_frame.method()->method_data();

  // Grab a lock to ensure atomic access to setting the return bci and
  // the displacement.  This can block and GC, invalidating all naked oops.
  MutexLocker ml(RetData_lock);

  // ProfileData is essentially a wrapper around a derived oop, so we
  // need to take the lock before making any ProfileData structures.
  ProfileData* data = h_mdo->data_at(h_mdo->dp_to_di(last_frame.mdp()));
  guarantee(data != nullptr, "profile data must be valid");
  RetData* rdata = data->as_RetData();
  address new_mdp = rdata->fixup_ret(return_bci, h_mdo);
  last_frame.set_mdp(new_mdp);
JRT_END

JRT_ENTRY(MethodCounters*, InterpreterRuntime::build_method_counters(JavaThread* current, Method* m))
  return Method::build_method_counters(current, m);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::at_safepoint(JavaThread* current))
  // We used to need an explicit preserve_arguments here for invoke bytecodes. However,
  // stack traversal automatically takes care of preserving arguments for invoke, so
  // this is no longer needed.
JRT_LEAF(jint, InterpreterRuntime::bcp_to_di(Method* method, address cur_bcp))
  assert(ProfileInterpreter, "must be profiling interpreter");
  int bci = method->bci_from(cur_bcp);
  MethodData* mdo = method->method_data();
  if (mdo == nullptr)  return 0;
  return mdo->bci_to_di(bci);
JRT_END

#ifdef ASSERT
JRT_LEAF(void, InterpreterRuntime::verify_mdp(Method* method, address bcp, address mdp))
  assert(ProfileInterpreter, "must be profiling interpreter");

  MethodData* mdo = method->method_data();
  assert(mdo != nullptr, "must not be null");

  int bci = method->bci_from(bcp);

  address mdp2 = mdo->bci_to_dp(bci);
  if (mdp != mdp2) {
    ResourceMark rm;
    tty->print_cr("FAILED verify : actual mdp %p   expected mdp %p @ bci %d", mdp, mdp2, bci);
    int current_di = mdo->dp_to_di(mdp);
    int expected_di = mdo->dp_to_di(mdp2);
    tty->print_cr("  actual di %d   expected di %d", current_di, expected_di);
    int expected_approx_bci = mdo->data_at(expected_di)->bci();
    int approx_bci = -1;
    if (current_di >= 0) {
      approx_bci = mdo->data_at(current_di)->bci();
    }
    tty->print_cr("  actual bci is %d  expected bci %d", approx_bci, expected_approx_bci);
    mdo->print_on(tty);
    method->print_codes();
  }
  assert(mdp == mdp2, "wrong mdp");
JRT_END
#endif // ASSERT

JRT_ENTRY(void, InterpreterRuntime::update_mdp_for_ret(JavaThread* current, int return_bci))
  assert(ProfileInterpreter, "must be profiling interpreter");
  ResourceMark rm(current);
  LastFrameAccessor last_frame(current);
  assert(last_frame.is_interpreted_frame(), "must come from interpreter");
  MethodData* h_mdo = last_frame.method()->method_data();

  // Grab a lock to ensure atomic access to setting the return bci and
  // the displacement.  This can block and GC, invalidating all naked oops.
  MutexLocker ml(RetData_lock);

  // ProfileData is essentially a wrapper around a derived oop, so we
  // need to take the lock before making any ProfileData structures.
  ProfileData* data = h_mdo->data_at(h_mdo->dp_to_di(last_frame.mdp()));
  guarantee(data != nullptr, "profile data must be valid");
  RetData* rdata = data->as_RetData();
  address new_mdp = rdata->fixup_ret(return_bci, h_mdo);
  last_frame.set_mdp(new_mdp);
JRT_END

JRT_ENTRY(MethodCounters*, InterpreterRuntime::build_method_counters(JavaThread* current, Method* m))
  return Method::build_method_counters(current, m);
JRT_END


JRT_ENTRY(void, InterpreterRuntime::at_safepoint(JavaThread* current))
  // We used to need an explicit preserve_arguments here for invoke bytecodes. However,
  // stack traversal automatically takes care of preserving arguments for invoke, so
  // this is no longer needed.

  // JRT_END does an implicit safepoint check, hence we are guaranteed to block
  // if this is called during a safepoint

  if (JvmtiExport::should_post_single_step()) {
    // This function is called by the interpreter when single stepping. Such single
    // stepping could unwind a frame. Then, it is important that we process any frames
    // that we might return into.
    StackWatermarkSet::before_unwind(current);

    // We are called during regular safepoints and when the VM is
    // single stepping. If any thread is marked for single stepping,
    // then we may have JVMTI work to do.
    LastFrameAccessor last_frame(current);
    JvmtiExport::at_single_stepping_point(current, last_frame.method(), last_frame.bcp());
  }
JRT_END

JRT_LEAF(void, InterpreterRuntime::at_unwind(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  // This function is called by the interpreter when the return poll found a reason
  // to call the VM. The reason could be that we are returning into a not yet safe
  // to access frame. We handle that below.
  // Note that this path does not check for single stepping, because we do not want
  // to single step when unwinding frames for an exception being thrown. Instead,
  // such single stepping code will use the safepoint table, which will use the
  // InterpreterRuntime::at_safepoint callback.
  StackWatermarkSet::before_unwind(current);
JRT_END
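
// JVMTI field watch support. The interpreter calls these entries for field
// accesses and modifications that may be watched; they bail out if no watch
// is set on the particular field, otherwise they turn the resolved
// ConstantPoolCacheEntry into a jfieldID and post the corresponding JVMTI event.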
JRT_ENTRY(void, InterpreterRuntime::post_field_access(JavaThread* current, oopDesc* obj,
                                                      ConstantPoolCacheEntry *cp_entry))

  // check the access_flags for the field in the klass

  InstanceKlass* ik = InstanceKlass::cast(cp_entry->f1_as_klass());
  int index = cp_entry->field_index();
  if (!ik->field_status(index).is_access_watched()) return;

  bool is_static = (obj == nullptr);
  bool is_inlined = cp_entry->is_inlined();
  HandleMark hm(current);

  Handle h_obj;
  if (!is_static) {
    // non-static field accessors have an object, but we need a handle
    h_obj = Handle(current, obj);
  }
  InstanceKlass* cp_entry_f1 = InstanceKlass::cast(cp_entry->f1_as_klass());
  jfieldID fid = jfieldIDWorkaround::to_jfieldID(cp_entry_f1, cp_entry->f2_as_index(), is_static, is_inlined);
  LastFrameAccessor last_frame(current);
  JvmtiExport::post_field_access(current, last_frame.method(), last_frame.bcp(), cp_entry_f1, h_obj, fid);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::post_field_modification(JavaThread* current, oopDesc* obj,
                                                            ConstantPoolCacheEntry *cp_entry, jvalue *value))

  Klass* k = cp_entry->f1_as_klass();

  // check the access_flags for the field in the klass
  InstanceKlass* ik = InstanceKlass::cast(k);
  int index = cp_entry->field_index();
  // bail out if field modifications are not watched
  if (!ik->field_status(index).is_modification_watched()) return;

  char sig_type = '\0';

  switch(cp_entry->flag_state()) {
    case btos: sig_type = JVM_SIGNATURE_BYTE;    break;
    case ztos: sig_type = JVM_SIGNATURE_BOOLEAN; break;
    case ctos: sig_type = JVM_SIGNATURE_CHAR;    break;
    case stos: sig_type = JVM_SIGNATURE_SHORT;   break;
    case itos: sig_type = JVM_SIGNATURE_INT;     break;
    case ftos: sig_type = JVM_SIGNATURE_FLOAT;   break;
    case atos: sig_type = JVM_SIGNATURE_CLASS;   break;
    case ltos: sig_type = JVM_SIGNATURE_LONG;    break;
    case dtos: sig_type = JVM_SIGNATURE_DOUBLE;  break;
    default:  ShouldNotReachHere(); return;
  }

  // Both Q-signatures and L-signatures are mapped to atos
  if (cp_entry->flag_state() == atos && ik->field_signature(index)->is_Q_signature()) {
    sig_type = JVM_SIGNATURE_PRIMITIVE_OBJECT;
  }

  bool is_static = (obj == nullptr);
  bool is_inlined = cp_entry->is_inlined();

  HandleMark hm(current);
  jfieldID fid = jfieldIDWorkaround::to_jfieldID(ik, cp_entry->f2_as_index(), is_static, is_inlined);
  jvalue fvalue;
#ifdef _LP64
  fvalue = *value;
#else
  // Long/double values are stored unaligned and also noncontiguously with
  // tagged stacks.  We can't just do a simple assignment even in the non-
  // J/D cases because a C++ compiler is allowed to assume that a jvalue is
  // 8-byte aligned, and interpreter stack slots are only 4-byte aligned.
  // We assume that the two halves of longs/doubles are stored in interpreter
  // stack slots in platform-endian order.
  jlong_accessor u;
  jint* newval = (jint*)value;
  u.words[0] = newval[0];
  u.words[1] = newval[Interpreter::stackElementWords]; // skip if tag
  fvalue.j = u.long_value;
#endif // _LP64

  Handle h_obj;
  if (!is_static) {
    // non-static field accessors have an object, but we need a handle
    h_obj = Handle(current, obj);
  }

  LastFrameAccessor last_frame(current);
  JvmtiExport::post_raw_field_modification(current, last_frame.method(), last_frame.bcp(), ik, h_obj,
                                           fid, sig_type, &fvalue);
JRT_END

JRT_ENTRY(void, InterpreterRuntime::post_method_entry(JavaThread* current))
  LastFrameAccessor last_frame(current);
  JvmtiExport::post_method_entry(current, last_frame.method(), last_frame.get_frame());
JRT_END


// This is a JRT_BLOCK_ENTRY because we have to stash away the return oop
// before transitioning to VM, and restore it after transitioning back
// to Java. The return oop at the top-of-stack is not walked by the GC.
JRT_BLOCK_ENTRY(void, InterpreterRuntime::post_method_exit(JavaThread* current))
  LastFrameAccessor last_frame(current);
  JvmtiExport::post_method_exit(current, last_frame.method(), last_frame.get_frame());
JRT_END

JRT_LEAF(int, InterpreterRuntime::interpreter_contains(address pc))
{
  return (Interpreter::contains(Continuation::get_top_return_pc_post_barrier(JavaThread::current(), pc)) ? 1 : 0);
}
JRT_END


// Implementation of SignatureHandlerLibrary
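//
// A signature handler is a small stub that moves a native method's arguments
// from the interpreter's Java expression stack into the platform's C calling
// convention before the native call. Generated (fast) handlers are cached
// here, keyed by the normalized fingerprint of the method signature; methods
// that cannot use a fast handler get Interpreter::slow_signature_handler().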

#ifndef SHARING_FAST_NATIVE_FINGERPRINTS
// Dummy definition (else normalization method is defined in CPU
// dependent code)
uint64_t InterpreterRuntime::normalize_fast_native_fingerprint(uint64_t fingerprint) {
  return fingerprint;
}
#endif

address SignatureHandlerLibrary::set_handler_blob() {
  BufferBlob* handler_blob = BufferBlob::create("native signature handlers", blob_size);
  if (handler_blob == nullptr) {
    return nullptr;
  }
  address handler = handler_blob->code_begin();
  _handler_blob = handler_blob;
  _handler = handler;
  return handler;
}

void SignatureHandlerLibrary::initialize() {
  if (_fingerprints != nullptr) {
    return;
  }
  if (set_handler_blob() == nullptr) {
    vm_exit_out_of_memory(blob_size, OOM_MALLOC_ERROR, "native signature handlers");
  }

  BufferBlob* bb = BufferBlob::create("Signature Handler Temp Buffer",
                                      SignatureHandlerLibrary::buffer_size);
  _buffer = bb->code_begin();

  _fingerprints = new (mtCode) GrowableArray<uint64_t>(32, mtCode);
  _handlers     = new (mtCode) GrowableArray<address>(32, mtCode);
}

address SignatureHandlerLibrary::set_handler(CodeBuffer* buffer) {
  address handler    = _handler;
  int     insts_size = buffer->pure_insts_size();
  if (handler + insts_size > _handler_blob->code_end()) {
    // get a new handler blob
    handler = set_handler_blob();
  }
  if (handler != nullptr) {
    memcpy(handler, buffer->insts_begin(), insts_size);
    pd_set_handler(handler);
    ICache::invalidate_range(handler, insts_size);
    _handler = handler + insts_size;
  }
  return handler;
}
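
// Install a signature handler for 'method': compute the signature fingerprint,
// reuse a previously generated handler if one exists for that fingerprint,
// otherwise generate a new handler into the temporary buffer and copy it into
// the handler blob. If no fast handler can be used, the generic slow
// signature handler is installed instead.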
void SignatureHandlerLibrary::add(const methodHandle& method) {
  if (method->signature_handler() == nullptr) {
    // use slow signature handler if we can't do better
    int handler_index = -1;
    // check if we can use customized (fast) signature handler
    if (UseFastSignatureHandlers && method->size_of_parameters() <= Fingerprinter::fp_max_size_of_parameters) {
      // use customized signature handler
      MutexLocker mu(SignatureHandlerLibrary_lock);
      // make sure data structure is initialized
      initialize();
      // lookup method signature's fingerprint
      uint64_t fingerprint = Fingerprinter(method).fingerprint();
      // allow CPU dependent code to optimize the fingerprints for the fast handler
      fingerprint = InterpreterRuntime::normalize_fast_native_fingerprint(fingerprint);
      handler_index = _fingerprints->find(fingerprint);
      // create handler if necessary
      if (handler_index < 0) {
        ResourceMark rm;
        ptrdiff_t align_offset = align_up(_buffer, CodeEntryAlignment) - (address)_buffer;
        CodeBuffer buffer((address)(_buffer + align_offset),
                          SignatureHandlerLibrary::buffer_size - align_offset);
        InterpreterRuntime::SignatureHandlerGenerator(method, &buffer).generate(fingerprint);
        // copy into code heap
        address handler = set_handler(&buffer);
        if (handler == nullptr) {
          // use slow signature handler (without memorizing it in the fingerprints)
        } else {
          // debugging support
          if (PrintSignatureHandlers && (handler != Interpreter::slow_signature_handler())) {
            ttyLocker ttyl;
            tty->cr();
            tty->print_cr("argument handler #%d for: %s %s (fingerprint = " UINT64_FORMAT ", %d bytes generated)",
                          _handlers->length(),
                          (method->is_static() ? "static" : "receiver"),
                          method->name_and_sig_as_C_string(),
                          fingerprint,
                          buffer.insts_size());
            if (buffer.insts_size() > 0) {
              Disassembler::decode(handler, handler + buffer.insts_size(), tty
                                   NOT_PRODUCT(COMMA &buffer.asm_remarks()));
            }
#ifndef PRODUCT
            address rh_begin = Interpreter::result_handler(method()->result_type());
            if (CodeCache::contains(rh_begin)) {
              // else it might be special platform dependent values
              tty->print_cr(" --- associated result handler ---");
              address rh_end = rh_begin;
              while (*(int*)rh_end != 0) {
                rh_end += sizeof(int);
              }
              Disassembler::decode(rh_begin, rh_end);
            } else {
              tty->print_cr(" associated result handler: " PTR_FORMAT, p2i(rh_begin));
            }
#endif
          }
          // add handler to library
          _fingerprints->append(fingerprint);
          _handlers->append(handler);
          // set handler index
          assert(_fingerprints->length() == _handlers->length(), "sanity check");
          handler_index = _fingerprints->length() - 1;
        }
      }
      // Set handler under SignatureHandlerLibrary_lock
      if (handler_index < 0) {
        // use generic signature handler
        method->set_signature_handler(Interpreter::slow_signature_handler());
      } else {
        // set handler
        method->set_signature_handler(_handlers->at(handler_index));
      }
    } else {
      DEBUG_ONLY(JavaThread::current()->check_possible_safepoint());
      // use generic signature handler
      method->set_signature_handler(Interpreter::slow_signature_handler());
    }
  }
#ifdef ASSERT
  int handler_index = -1;
  int fingerprint_index = -2;
  {
    // '_handlers' and '_fingerprints' are 'GrowableArray's and are NOT synchronized
    // in any way if accessed from multiple threads. To avoid races with another
    // thread which may change the arrays in the above mutex-protected block, we
    // have to protect this read access here with the same mutex as well!
    MutexLocker mu(SignatureHandlerLibrary_lock);
    if (_handlers != nullptr) {
      handler_index = _handlers->find(method->signature_handler());
      uint64_t fingerprint = Fingerprinter(method).fingerprint();
      fingerprint = InterpreterRuntime::normalize_fast_native_fingerprint(fingerprint);
      fingerprint_index = _fingerprints->find(fingerprint);
    }
  }
  assert(method->signature_handler() == Interpreter::slow_signature_handler() ||
         handler_index == fingerprint_index, "sanity check");
#endif // ASSERT
}

void SignatureHandlerLibrary::add(uint64_t fingerprint, address handler) {
  int handler_index = -1;
  // use customized signature handler
  MutexLocker mu(SignatureHandlerLibrary_lock);
  // make sure data structure is initialized
  initialize();
  fingerprint = InterpreterRuntime::normalize_fast_native_fingerprint(fingerprint);
  handler_index = _fingerprints->find(fingerprint);
  // create handler if necessary
  if (handler_index < 0) {
    if (PrintSignatureHandlers && (handler != Interpreter::slow_signature_handler())) {
      tty->cr();
      tty->print_cr("argument handler #%d at " PTR_FORMAT " for fingerprint " UINT64_FORMAT,
                    _handlers->length(),
                    p2i(handler),
                    fingerprint);
    }
    _fingerprints->append(fingerprint);
    _handlers->append(handler);
  } else {
    if (PrintSignatureHandlers) {
      tty->cr();
      tty->print_cr("duplicate argument handler #%d for fingerprint " UINT64_FORMAT " (old: " PTR_FORMAT ", new: " PTR_FORMAT ")",
                    _handlers->length(),
                    fingerprint,
                    p2i(_handlers->at(handler_index)),
                    p2i(handler));
    }
  }
}


BufferBlob*              SignatureHandlerLibrary::_handler_blob = nullptr;
address                  SignatureHandlerLibrary::_handler      = nullptr;
GrowableArray<uint64_t>* SignatureHandlerLibrary::_fingerprints = nullptr;
GrowableArray<address>*  SignatureHandlerLibrary::_handlers     = nullptr;
address                  SignatureHandlerLibrary::_buffer       = nullptr;


JRT_ENTRY(void, InterpreterRuntime::prepare_native_call(JavaThread* current, Method* method))
  methodHandle m(current, method);
  assert(m->is_native(), "sanity check");
  // lookup native function entry point if it doesn't exist
  if (!m->has_native_function()) {
    NativeLookup::lookup(m, CHECK);
  }
  // make sure signature handler is installed
  SignatureHandlerLibrary::add(m);
  // The interpreter entry point checks the signature handler first,
  // before trying to fetch the native entry point and klass mirror.
  // We must set the signature handler last, so that multiple processors
  // preparing the same method will be sure to see non-null entry & mirror.
JRT_END
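
// Used by the interpreter's JVMTI PopFrame support on these platforms:
// computes the size of the outgoing arguments of the invoke at the current
// bci and copies that many stack slots from src_address to dest_address,
// keeping the arguments available for the invoke that is re-executed after
// the frame is popped.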
#if defined(IA32) || defined(AMD64) || defined(ARM)
JRT_LEAF(void, InterpreterRuntime::popframe_move_outgoing_args(JavaThread* current, void* src_address, void* dest_address))
  assert(current == JavaThread::current(), "pre-condition");
  if (src_address == dest_address) {
    return;
  }
  ResourceMark rm;
  LastFrameAccessor last_frame(current);
  assert(last_frame.is_interpreted_frame(), "");
  jint bci = last_frame.bci();
  methodHandle mh(current, last_frame.method());
  Bytecode_invoke invoke(mh, bci);
  ArgumentSizeComputer asc(invoke.signature());
  int size_of_arguments = (asc.size() + (invoke.has_receiver() ? 1 : 0)); // receiver
  Copy::conjoint_jbytes(src_address, dest_address,
                        size_of_arguments * Interpreter::stackElementSize);
JRT_END
#endif

#if INCLUDE_JVMTI
// This supports the JVMTI PopFrame interface.
// Make sure it is an invokestatic of a polymorphic intrinsic that has a member_name argument
// and return it as a vm_result so that it can be reloaded in the list of invokestatic parameters.
// The member_name argument is a saved reference (in local#0) to the member_name.
// For backward compatibility with some JDK versions (7, 8) it can also be a direct method handle.
// FIXME: remove DMH case after j.l.i.InvokerBytecodeGenerator code shape is updated.
JRT_ENTRY(void, InterpreterRuntime::member_name_arg_or_null(JavaThread* current, address member_name,
                                                            Method* method, address bcp))
  Bytecodes::Code code = Bytecodes::code_at(method, bcp);
  if (code != Bytecodes::_invokestatic) {
    return;
  }
  ConstantPool* cpool = method->constants();
  int cp_index = Bytes::get_native_u2(bcp + 1) + ConstantPool::CPCACHE_INDEX_TAG;
  Symbol* cname = cpool->klass_name_at(cpool->klass_ref_index_at(cp_index));
  Symbol* mname = cpool->name_ref_at(cp_index);

  if (MethodHandles::has_member_arg(cname, mname)) {
    oop member_name_oop = cast_to_oop(member_name);
    if (java_lang_invoke_DirectMethodHandle::is_instance(member_name_oop)) {
      // FIXME: remove after j.l.i.InvokerBytecodeGenerator code shape is updated.
      member_name_oop = java_lang_invoke_DirectMethodHandle::member(member_name_oop);
    }
    current->set_vm_result(member_name_oop);
  } else {
    current->set_vm_result(nullptr);
  }
JRT_END
#endif // INCLUDE_JVMTI

#ifndef PRODUCT
// This must be a JRT_LEAF function because the interpreter must save registers on x86 to
// call this, which changes rsp and makes the interpreter's expression stack not walkable.
// The generated code still uses call_VM because that will set up the frame pointer for
// bcp and method.
JRT_LEAF(intptr_t, InterpreterRuntime::trace_bytecode(JavaThread* current, intptr_t preserve_this_value, intptr_t tos, intptr_t tos2))
  assert(current == JavaThread::current(), "pre-condition");
  LastFrameAccessor last_frame(current);
  assert(last_frame.is_interpreted_frame(), "must be an interpreted frame");
  methodHandle mh(current, last_frame.method());
  BytecodeTracer::trace_interpreter(mh, last_frame.bcp(), tos, tos2);
  return preserve_this_value;
JRT_END
#endif // !PRODUCT