1 /* 2 * Copyright (c) 2017, 2024, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#include "precompiled.hpp"
#include "cds/archiveUtils.hpp"
#include "cds/cdsConfig.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "gc/shared/barrierSet.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "gc/shared/gcLocker.inline.hpp"
#include "interpreter/interpreter.hpp"
#include "logging/log.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/metadataFactory.hpp"
#include "oops/access.hpp"
#include "oops/compressedOops.inline.hpp"
#include "oops/fieldStreams.inline.hpp"
#include "oops/flatArrayKlass.hpp"
#include "oops/inlineKlass.inline.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "oops/objArrayKlass.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/safepointVerifiers.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/signature.hpp"
#include "runtime/thread.inline.hpp"
#include "utilities/copy.hpp"

// Constructor
InlineKlass::InlineKlass(const ClassFileParser& parser)
  : InstanceKlass(parser, InlineKlass::Kind) {
  // Every instance of this klass gets the shared inline-type mark word prototype.
  set_prototype_header(markWord::inline_type_prototype());
  assert(is_inline_klass(), "sanity");
  assert(prototype_header().is_inline_type(), "sanity");
}

// Default constructor, only reachable while dumping or loading a CDS archive.
InlineKlass::InlineKlass() {
  assert(CDSConfig::is_dumping_archive() || UseSharedSpaces, "only for CDS");
}

// Locates the inline-klass fixed block inside this klass' metadata and resets
// every slot in it. The fixed block caches the extended signature, return
// registers and pack/unpack handlers used by the scalarized calling convention;
// they are (re)computed lazily by initialize_calling_convention().
void InlineKlass::init_fixed_block() {
  _adr_inlineklass_fixed_block = inlineklass_static_block();
  // Addresses used for inline type calling convention
  *((Array<SigEntry>**)adr_extended_sig()) = nullptr;
  *((Array<VMRegPair>**)adr_return_regs()) = nullptr;
  *((address*)adr_pack_handler()) = nullptr;
  *((address*)adr_pack_handler_jobject()) = nullptr;
  *((address*)adr_unpack_handler()) = nullptr;
  assert(pack_handler() == nullptr, "pack handler not null");
  *((int*)adr_default_value_offset()) = 0;
  *((address*)adr_value_array_klasses()) = nullptr;
}

// Returns the canonical all-zero default instance of this inline type.
// It is stored in a field of the java mirror; read with acquire semantics
// to pair with the releasing store done during class initialization.
oop InlineKlass::default_value() {
  assert(is_initialized() || is_being_initialized() || is_in_error_state(), "default value is set at the beginning of initialization");
  oop val = java_mirror()->obj_field_acquire(default_value_offset());
  assert(val != nullptr, "Sanity check");
  assert(oopDesc::is_oop(val), "Sanity check");
  assert(val->is_inline_type(), "Sanity check");
  assert(val->klass() == this, "sanity check");
  return val;
}

// Offset of the first declared field in an instance of this klass.
// In debug builds the computed value is cross-checked against the actual
// minimum offset over all fields.
int InlineKlass::first_field_offset_old() {
#ifdef ASSERT
  int first_offset = INT_MAX;
  for (AllFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.offset() < first_offset) first_offset= fs.offset();
  }
#endif
  int base_offset = instanceOopDesc::base_offset_in_bytes();
  // The first field of inline types is aligned on a long boundary
  base_offset = align_up(base_offset, BytesPerLong);
  assert(base_offset == first_offset, "inconsistent offsets");
  return base_offset;
}

// Allocates a regular heap instance of this inline type (may GC / throw).
instanceOop InlineKlass::allocate_instance(TRAPS) {
  int size = size_helper();  // Query before forming handle.

  instanceOop oop = (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
  assert(oop->mark().is_inline_type(), "Expected inline type");
  return oop;
}

// Allocates a buffer instance used to box a flat payload (may GC / throw).
instanceOop InlineKlass::allocate_instance_buffer(TRAPS) {
  int size = size_helper();  // Query before forming handle.

  instanceOop oop = (instanceOop)Universe::heap()->obj_buffer_allocate(this, size, CHECK_NULL);
  assert(oop->mark().is_inline_type(), "Expected inline type");
  return oop;
}

// Total number of embedded oops in one instance, summed over all oop maps.
int InlineKlass::nonstatic_oop_count() {
  int oops = 0;
  int map_count = nonstatic_oop_map_count();
  OopMapBlock* block = start_of_nonstatic_oop_maps();
  OopMapBlock* end = block + map_count;
  while (block != end) {
    oops += block->count();
    block++;
  }
  return oops;
}

// Reads a flat field at 'offset' inside 'obj' and returns it boxed in a fresh
// buffer instance. Empty inline types have no payload, so the shared default
// value is returned instead of allocating.
oop InlineKlass::read_flat_field(oop obj, int offset, TRAPS) {
  oop res = nullptr;
  assert(is_initialized() || is_being_initialized()|| is_in_error_state(),
         "Must be initialized, initializing or in a corner case of an escaped instance of a class that failed its initialization");
  if (is_empty_inline_type()) {
    res = (instanceOop)default_value();
  } else {
    // Handleize obj: the buffer allocation below can trigger a GC that moves it.
    Handle obj_h(THREAD, obj);
    res = allocate_instance_buffer(CHECK_NULL);
    inline_copy_payload_to_new_oop(((char*)(oopDesc*)obj_h()) + offset, res);
  }
  assert(res != nullptr, "Must be set in one of two paths above");
  return res;
}

// Stores 'value' into the flat field at 'offset' inside 'obj'.
// Inline-type fields are null-restricted, so a null value throws NPE.
void InlineKlass::write_flat_field(oop obj, int offset, oop value, TRAPS) {
  if (value == nullptr) {
    THROW(vmSymbols::java_lang_NullPointerException());
  }
  write_non_null_flat_field(obj, offset, value);
}

// Same as above but the caller guarantees value is non-null.
// Empty inline types carry no payload, so there is nothing to copy.
void InlineKlass::write_non_null_flat_field(oop obj, int offset, oop value) {
  assert(value != nullptr, "");
  if (!is_empty_inline_type()) {
    inline_copy_oop_to_payload(value, ((char*)(oopDesc*)obj) + offset);
  }
}

// Arrays of...
// Decides whether arrays of this inline type are laid out flat (element
// payloads stored inline in the array) rather than as arrays of references.
bool InlineKlass::flat_array() {
  if (!UseFlatArray) {
    return false;
  }
  // Too big
  int elem_bytes = get_payload_size_in_bytes();
  if ((FlatArrayElementMaxSize >= 0) && (elem_bytes > FlatArrayElementMaxSize)) {
    return false;
  }
  // Too many embedded oops
  if ((FlatArrayElementMaxOops >= 0) && (nonstatic_oop_count() > FlatArrayElementMaxOops)) {
    return false;
  }
  // Declared atomic but not naturally atomic.
  if (must_be_atomic() && !is_naturally_atomic()) {
    return false;
  }
  // VM enforcing InlineArrayAtomicAccess only...
  if (InlineArrayAtomicAccess && (!is_naturally_atomic())) {
    return false;
  }
  return true;
}

// Returns the n-dimensional array klass for this inline type, creating it on
// first use. Creation is guarded by MultiArray_lock with a double-checked
// pattern: lock-free readers use load_acquire, paired with the release_store
// below.
Klass* InlineKlass::value_array_klass(int n, TRAPS) {
  if (Atomic::load_acquire(adr_value_array_klasses()) == nullptr) {
    // Atomic creation of array_klasses
    RecursiveLocker rl(MultiArray_lock, THREAD);

    // Check if update has already taken place
    if (value_array_klasses() == nullptr) {
      ArrayKlass* k;
      if (flat_array()) {
        k = FlatArrayKlass::allocate_klass(this, CHECK_NULL);
      } else {
        k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, true, CHECK_NULL);

      }
      // use 'release' to pair with lock-free load
      Atomic::release_store(adr_value_array_klasses(), k);
    }
  }
  ArrayKlass* ak = value_array_klasses();
  return ak->array_klass(n, THREAD);
}

// Lock-free variant: returns the n-dimensional array klass if it already
// exists, or null without creating anything.
Klass* InlineKlass::value_array_klass_or_null(int n) {
  // Need load-acquire for lock-free read
  ArrayKlass* ak = Atomic::load_acquire(adr_value_array_klasses());
  if (ak == nullptr) {
    return nullptr;
  } else {
    return ak->array_klass_or_null(n);
  }
}

// Convenience overload: one-dimensional array klass, creating it if needed.
Klass* InlineKlass::value_array_klass(TRAPS) {
  return value_array_klass(1, THREAD);
}

// Convenience overload: one-dimensional array klass or null, never creates.
Klass* InlineKlass::value_array_klass_or_null() {
  return value_array_klass_or_null(1);
}

// Inline type arguments are not passed by reference, instead each
// field of the inline type is passed as an argument. This helper
// function collects the flat field (recursively)
// in a list. Included with the field's type is
// the offset of each field in the inline type: i2c and c2i adapters
// need that to load or store fields. Finally, the list of fields is
// sorted in order of increasing offsets: the adapters and the
// compiled code need to agree upon the order of fields.
//
// The list of basic types that is returned starts with a T_METADATA
// and ends with an extra T_VOID. T_METADATA/T_VOID pairs are used as
// delimiters. Every entry between the two is a field of the inline
// type. If there's an embedded inline type in the list, it also starts
// with a T_METADATA and ends with a T_VOID. This is so we can
// generate a unique fingerprint for the method's adapters and we can
// generate the list of basic types from the interpreter point of view
// (inline types passed as reference: iterate on the list until a
// T_METADATA, drop everything until and including the closing
// T_VOID) or the compiler point of view (each field of the inline
// types is an argument: drop all T_METADATA/T_VOID from the list).
//
// Value classes could also have fields in abstract super value classes.
// Use a HierarchicalFieldStream to get them as well.
int InlineKlass::collect_fields(GrowableArray<SigEntry>* sig, int base_off) {
  int count = 0;
  SigEntry::add_entry(sig, T_METADATA, name(), base_off);
  for (HierarchicalFieldStream<JavaFieldStream> fs(this); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) continue;
    // For embedded (base_off > 0) inline types, field offsets are rebased so
    // they are relative to the enclosing payload, not to an object header.
    int offset = base_off + fs.offset() - (base_off > 0 ? first_field_offset() : 0);
    // TODO 8284443 Use different heuristic to decide what should be scalarized in the calling convention
    if (fs.is_flat()) {
      // Resolve klass of flat field and recursively collect fields
      Klass* vk = get_inline_type_field_klass(fs.index());
      count += InlineKlass::cast(vk)->collect_fields(sig, offset);
    } else {
      BasicType bt = Signature::basic_type(fs.signature());
      SigEntry::add_entry(sig, bt, fs.signature(), offset);
      count += type2size[bt];
    }
  }
  // Closing T_VOID delimiter, placed just past the end of the instance.
  int offset = base_off + size_helper()*HeapWordSize - (base_off > 0 ? first_field_offset() : 0);
  SigEntry::add_entry(sig, T_VOID, name(), offset);
  if (base_off == 0) {
    // Only the outermost call sorts, once all (recursive) entries are present.
    sig->sort(SigEntry::compare);
  }
  assert(sig->at(0)._bt == T_METADATA && sig->at(sig->length()-1)._bt == T_VOID, "broken structure");
  return count;
}

// Computes and caches everything the scalarized calling convention needs:
// the extended signature, the return-register assignment and the generated
// pack/unpack adapter entry points.
void InlineKlass::initialize_calling_convention(TRAPS) {
  // Because the pack and unpack handler addresses need to be loadable from generated code,
  // they are stored at a fixed offset in the klass metadata. Since inline type klasses do
  // not have a vtable, the vtable offset is used to store these addresses.
  if (InlineTypeReturnedAsFields || InlineTypePassFieldsAsArgs) {
    ResourceMark rm;
    GrowableArray<SigEntry> sig_vk;
    int nb_fields = collect_fields(&sig_vk);
    Array<SigEntry>* extended_sig = MetadataFactory::new_array<SigEntry>(class_loader_data(), sig_vk.length(), CHECK);
    *((Array<SigEntry>**)adr_extended_sig()) = extended_sig;
    for (int i = 0; i < sig_vk.length(); i++) {
      extended_sig->at_put(i, sig_vk.at(i));
    }
    if (can_be_returned_as_fields(/* init= */ true)) {
      // Extra leading T_METADATA slot carries the klass tag in the return convention.
      nb_fields++;
      BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, nb_fields);
      sig_bt[0] = T_METADATA;
      SigEntry::fill_sig_bt(&sig_vk, sig_bt+1);
      VMRegPair* regs = NEW_RESOURCE_ARRAY(VMRegPair, nb_fields);
      int total = SharedRuntime::java_return_convention(sig_bt, regs, nb_fields);

      // total <= 0 means the platform cannot return this shape in registers.
      if (total > 0) {
        Array<VMRegPair>* return_regs = MetadataFactory::new_array<VMRegPair>(class_loader_data(), nb_fields, CHECK);
        *((Array<VMRegPair>**)adr_return_regs()) = return_regs;
        for (int i = 0; i < nb_fields; i++) {
          return_regs->at_put(i, regs[i]);
        }

        BufferedInlineTypeBlob* buffered_blob = SharedRuntime::generate_buffered_inline_type_adapter(this);
        *((address*)adr_pack_handler()) = buffered_blob->pack_fields();
        *((address*)adr_pack_handler_jobject()) = buffered_blob->pack_fields_jobject();
        *((address*)adr_unpack_handler()) = buffered_blob->unpack_fields();
        assert(CodeCache::find_blob(pack_handler()) == buffered_blob, "lost track of blob");
        assert(can_be_returned_as_fields(), "sanity");
      }
    }
    // If the extended signature ended up unused on both paths, release it.
    if (!can_be_returned_as_fields() && !can_be_passed_as_fields()) {
      MetadataFactory::free_array<SigEntry>(class_loader_data(), extended_sig);
      assert(return_regs() == nullptr, "sanity");
    }
  }
}

// Releases the calling-convention metadata and code blobs owned by this klass,
// then delegates to InstanceKlass for the rest.
void InlineKlass::deallocate_contents(ClassLoaderData* loader_data) {
  if (extended_sig() != nullptr) {
    MetadataFactory::free_array<SigEntry>(loader_data, extended_sig());
    *((Array<SigEntry>**)adr_extended_sig()) = nullptr;
  }
  if (return_regs() != nullptr) {
    MetadataFactory::free_array<VMRegPair>(loader_data, return_regs());
    *((Array<VMRegPair>**)adr_return_regs()) = nullptr;
  }
  cleanup_blobs();
  InstanceKlass::deallocate_contents(loader_data);
}

// Static hook used during klass unloading.
void InlineKlass::cleanup(InlineKlass* ik) {
  ik->cleanup_blobs();
}

// Frees the buffered inline-type adapter blob (if any) and clears all three
// handler slots; pack_handler() doubles as the "blob present" flag.
void InlineKlass::cleanup_blobs() {
  if (pack_handler() != nullptr) {
    CodeBlob* buffered_blob = CodeCache::find_blob(pack_handler());
    assert(buffered_blob->is_buffered_inline_type_blob(), "bad blob type");
    BufferBlob::free((BufferBlob*)buffered_blob);
    *((address*)adr_pack_handler()) = nullptr;
    *((address*)adr_pack_handler_jobject()) = nullptr;
    *((address*)adr_unpack_handler()) = nullptr;
  }
}

// Can this inline type be passed as multiple values?
bool InlineKlass::can_be_passed_as_fields() const {
  return InlineTypePassFieldsAsArgs;
}

// Can this inline type be returned as multiple values?
352 bool InlineKlass::can_be_returned_as_fields(bool init) const { 353 return InlineTypeReturnedAsFields && (init || return_regs() != nullptr); 354 } 355 356 // Create handles for all oop fields returned in registers that are going to be live across a safepoint 357 void InlineKlass::save_oop_fields(const RegisterMap& reg_map, GrowableArray<Handle>& handles) const { 358 Thread* thread = Thread::current(); 359 const Array<SigEntry>* sig_vk = extended_sig(); 360 const Array<VMRegPair>* regs = return_regs(); 361 int j = 1; 362 363 for (int i = 0; i < sig_vk->length(); i++) { 364 BasicType bt = sig_vk->at(i)._bt; 365 if (bt == T_OBJECT || bt == T_ARRAY) { 366 VMRegPair pair = regs->at(j); 367 address loc = reg_map.location(pair.first(), nullptr); 368 oop v = *(oop*)loc; 369 assert(v == nullptr || oopDesc::is_oop(v), "not an oop?"); 370 assert(Universe::heap()->is_in_or_null(v), "must be heap pointer"); 371 handles.push(Handle(thread, v)); 372 } 373 if (bt == T_METADATA) { 374 continue; 375 } 376 if (bt == T_VOID && 377 sig_vk->at(i-1)._bt != T_LONG && 378 sig_vk->at(i-1)._bt != T_DOUBLE) { 379 continue; 380 } 381 j++; 382 } 383 assert(j == regs->length(), "missed a field?"); 384 } 385 386 // Update oop fields in registers from handles after a safepoint 387 void InlineKlass::restore_oop_results(RegisterMap& reg_map, GrowableArray<Handle>& handles) const { 388 assert(InlineTypeReturnedAsFields, "Inline types should never be returned as fields"); 389 const Array<SigEntry>* sig_vk = extended_sig(); 390 const Array<VMRegPair>* regs = return_regs(); 391 assert(regs != nullptr, "inconsistent"); 392 393 int j = 1; 394 for (int i = 0, k = 0; i < sig_vk->length(); i++) { 395 BasicType bt = sig_vk->at(i)._bt; 396 if (bt == T_OBJECT || bt == T_ARRAY) { 397 VMRegPair pair = regs->at(j); 398 address loc = reg_map.location(pair.first(), nullptr); 399 *(oop*)loc = handles.at(k++)(); 400 } 401 if (bt == T_METADATA) { 402 continue; 403 } 404 if (bt == T_VOID && 405 sig_vk->at(i-1)._bt != 
T_LONG && 406 sig_vk->at(i-1)._bt != T_DOUBLE) { 407 continue; 408 } 409 j++; 410 } 411 assert(j == regs->length(), "missed a field?"); 412 } 413 414 // Fields are in registers. Create an instance of the inline type and 415 // initialize it with the values of the fields. 416 oop InlineKlass::realloc_result(const RegisterMap& reg_map, const GrowableArray<Handle>& handles, TRAPS) { 417 oop new_vt = allocate_instance(CHECK_NULL); 418 const Array<SigEntry>* sig_vk = extended_sig(); 419 const Array<VMRegPair>* regs = return_regs(); 420 421 int j = 1; 422 int k = 0; 423 for (int i = 0; i < sig_vk->length(); i++) { 424 BasicType bt = sig_vk->at(i)._bt; 425 if (bt == T_METADATA) { 426 continue; 427 } 428 if (bt == T_VOID) { 429 if (sig_vk->at(i-1)._bt == T_LONG || 430 sig_vk->at(i-1)._bt == T_DOUBLE) { 431 j++; 432 } 433 continue; 434 } 435 int off = sig_vk->at(i)._offset; 436 assert(off > 0, "offset in object should be positive"); 437 VMRegPair pair = regs->at(j); 438 address loc = reg_map.location(pair.first(), nullptr); 439 switch(bt) { 440 case T_BOOLEAN: { 441 new_vt->bool_field_put(off, *(jboolean*)loc); 442 break; 443 } 444 case T_CHAR: { 445 new_vt->char_field_put(off, *(jchar*)loc); 446 break; 447 } 448 case T_BYTE: { 449 new_vt->byte_field_put(off, *(jbyte*)loc); 450 break; 451 } 452 case T_SHORT: { 453 new_vt->short_field_put(off, *(jshort*)loc); 454 break; 455 } 456 case T_INT: { 457 new_vt->int_field_put(off, *(jint*)loc); 458 break; 459 } 460 case T_LONG: { 461 #ifdef _LP64 462 new_vt->double_field_put(off, *(jdouble*)loc); 463 #else 464 Unimplemented(); 465 #endif 466 break; 467 } 468 case T_OBJECT: 469 case T_ARRAY: { 470 Handle handle = handles.at(k++); 471 new_vt->obj_field_put(off, handle()); 472 break; 473 } 474 case T_FLOAT: { 475 new_vt->float_field_put(off, *(jfloat*)loc); 476 break; 477 } 478 case T_DOUBLE: { 479 new_vt->double_field_put(off, *(jdouble*)loc); 480 break; 481 } 482 default: 483 ShouldNotReachHere(); 484 } 485 *(intptr_t*)loc = 0xDEAD; 
486 j++; 487 } 488 assert(j == regs->length(), "missed a field?"); 489 assert(k == handles.length(), "missed an oop?"); 490 return new_vt; 491 } 492 493 // Check the return register for an InlineKlass oop 494 InlineKlass* InlineKlass::returned_inline_klass(const RegisterMap& map) { 495 BasicType bt = T_METADATA; 496 VMRegPair pair; 497 int nb = SharedRuntime::java_return_convention(&bt, &pair, 1); 498 assert(nb == 1, "broken"); 499 500 address loc = map.location(pair.first(), nullptr); 501 intptr_t ptr = *(intptr_t*)loc; 502 if (is_set_nth_bit(ptr, 0)) { 503 // Return value is tagged, must be an InlineKlass pointer 504 clear_nth_bit(ptr, 0); 505 assert(Metaspace::contains((void*)ptr), "should be klass"); 506 InlineKlass* vk = (InlineKlass*)ptr; 507 assert(vk->can_be_returned_as_fields(), "must be able to return as fields"); 508 return vk; 509 } 510 // Return value is not tagged, must be a valid oop 511 assert(oopDesc::is_oop_or_null(cast_to_oop(ptr), true), 512 "Bad oop return: " PTR_FORMAT, ptr); 513 return nullptr; 514 } 515 516 // CDS support 517 518 void InlineKlass::metaspace_pointers_do(MetaspaceClosure* it) { 519 InstanceKlass::metaspace_pointers_do(it); 520 521 InlineKlass* this_ptr = this; 522 it->push((Klass**)adr_value_array_klasses()); 523 } 524 525 void InlineKlass::remove_unshareable_info() { 526 InstanceKlass::remove_unshareable_info(); 527 528 // update it to point to the "buffered" copy of this class. 
529 _adr_inlineklass_fixed_block = inlineklass_static_block(); 530 ArchivePtrMarker::mark_pointer((address*)&_adr_inlineklass_fixed_block); 531 532 *((Array<SigEntry>**)adr_extended_sig()) = nullptr; 533 *((Array<VMRegPair>**)adr_return_regs()) = nullptr; 534 *((address*)adr_pack_handler()) = nullptr; 535 *((address*)adr_pack_handler_jobject()) = nullptr; 536 *((address*)adr_unpack_handler()) = nullptr; 537 assert(pack_handler() == nullptr, "pack handler not null"); 538 if (value_array_klasses() != nullptr) { 539 value_array_klasses()->remove_unshareable_info(); 540 } 541 } 542 543 void InlineKlass::remove_java_mirror() { 544 InstanceKlass::remove_java_mirror(); 545 if (value_array_klasses() != nullptr) { 546 value_array_klasses()->remove_java_mirror(); 547 } 548 } 549 550 void InlineKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, PackageEntry* pkg_entry, TRAPS) { 551 InstanceKlass::restore_unshareable_info(loader_data, protection_domain, pkg_entry, CHECK); 552 if (value_array_klasses() != nullptr) { 553 value_array_klasses()->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK); 554 } 555 } 556 557 // oop verify 558 559 void InlineKlass::verify_on(outputStream* st) { 560 InstanceKlass::verify_on(st); 561 guarantee(prototype_header().is_inline_type(), "Prototype header is not inline type"); 562 } 563 564 void InlineKlass::oop_verify_on(oop obj, outputStream* st) { 565 InstanceKlass::oop_verify_on(obj, st); 566 guarantee(obj->mark().is_inline_type(), "Header is not inline type"); 567 }