1 /* 2 * Copyright (c) 2005, 2024, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "ci/ciArrayKlass.hpp" 27 #include "ci/ciEnv.hpp" 28 #include "ci/ciKlass.hpp" 29 #include "ci/ciMethod.hpp" 30 #include "classfile/javaClasses.inline.hpp" 31 #include "classfile/vmClasses.hpp" 32 #include "code/dependencies.hpp" 33 #include "compiler/compileLog.hpp" 34 #include "compiler/compileBroker.hpp" 35 #include "compiler/compileTask.hpp" 36 #include "memory/resourceArea.hpp" 37 #include "oops/klass.hpp" 38 #include "oops/oop.inline.hpp" 39 #include "oops/method.inline.hpp" 40 #include "oops/objArrayKlass.hpp" 41 #include "runtime/flags/flagSetting.hpp" 42 #include "runtime/handles.hpp" 43 #include "runtime/handles.inline.hpp" 44 #include "runtime/javaThread.inline.hpp" 45 #include "runtime/jniHandles.inline.hpp" 46 #include "runtime/mutexLocker.hpp" 47 #include "runtime/perfData.hpp" 48 #include "runtime/vmThread.hpp" 49 #include "utilities/copy.hpp" 50 51 52 #ifdef ASSERT 53 static bool must_be_in_vm() { 54 Thread* thread = Thread::current(); 55 if (thread->is_Java_thread()) { 56 return JavaThread::cast(thread)->thread_state() == _thread_in_vm; 57 } else { 58 return true; // Could be VMThread or GC thread 59 } 60 } 61 #endif //ASSERT 62 63 bool Dependencies::_verify_in_progress = false; // don't -Xlog:dependencies 64 65 void Dependencies::initialize(ciEnv* env) { 66 Arena* arena = env->arena(); 67 _oop_recorder = env->oop_recorder(); 68 _log = env->log(); 69 _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0); 70 #if INCLUDE_JVMCI 71 _using_dep_values = false; 72 #endif 73 DEBUG_ONLY(_deps[end_marker] = nullptr); 74 for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) { 75 _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, nullptr); 76 } 77 _content_bytes = nullptr; 78 _size_in_bytes = (size_t)-1; 79 80 assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity"); 81 } 82 83 void Dependencies::assert_evol_method(ciMethod* m) { 84 assert_common_1(evol_method, m); 85 } 86 87 void Dependencies::assert_leaf_type(ciKlass* ctxk) { 88 if (ctxk->is_array_klass()) { 89 // As a special case, support this assertion on an array type, 90 // which reduces to an assertion on its element type. 91 // Note that this cannot be done with assertions that 92 // relate to concreteness or abstractness. 
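    // For example (illustrative types only): a leaf_type assertion on Foo[][]
    // reduces to a leaf_type assertion on Foo itself, since any new subclass
    // of Foo would also introduce a new subtype of the array type.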
93 ciType* elemt = ctxk->as_array_klass()->base_element_type(); 94 if (!elemt->is_instance_klass()) return; // Ex: int[][] 95 ctxk = elemt->as_instance_klass(); 96 //if (ctxk->is_final()) return; // Ex: String[][] 97 } 98 check_ctxk(ctxk); 99 assert_common_1(leaf_type, ctxk); 100 } 101 102 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) { 103 check_ctxk_abstract(ctxk); 104 assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck); 105 } 106 107 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) { 108 check_ctxk(ctxk); 109 check_unique_method(ctxk, uniqm); 110 assert_common_2(unique_concrete_method_2, ctxk, uniqm); 111 } 112 113 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm, ciKlass* resolved_klass, ciMethod* resolved_method) { 114 check_ctxk(ctxk); 115 check_unique_method(ctxk, uniqm); 116 assert_common_4(unique_concrete_method_4, ctxk, uniqm, resolved_klass, resolved_method); 117 } 118 119 void Dependencies::assert_unique_implementor(ciInstanceKlass* ctxk, ciInstanceKlass* uniqk) { 120 check_ctxk(ctxk); 121 check_unique_implementor(ctxk, uniqk); 122 assert_common_2(unique_implementor, ctxk, uniqk); 123 } 124 125 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) { 126 check_ctxk(ctxk); 127 assert_common_1(no_finalizable_subclasses, ctxk); 128 } 129 130 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) { 131 assert_common_2(call_site_target_value, call_site, method_handle); 132 } 133 134 #if INCLUDE_JVMCI 135 136 Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) { 137 _oop_recorder = oop_recorder; 138 _log = log; 139 _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0); 140 _using_dep_values = true; 141 DEBUG_ONLY(_dep_values[end_marker] = nullptr); 142 for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) { 143 _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue()); 144 } 145 _content_bytes = nullptr; 146 _size_in_bytes = (size_t)-1; 147 148 assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity"); 149 } 150 151 void Dependencies::assert_evol_method(Method* m) { 152 assert_common_1(evol_method, DepValue(_oop_recorder, m)); 153 } 154 155 void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) { 156 check_ctxk(ctxk); 157 assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk)); 158 } 159 160 void Dependencies::assert_leaf_type(Klass* ctxk) { 161 if (ctxk->is_array_klass()) { 162 // As a special case, support this assertion on an array type, 163 // which reduces to an assertion on its element type. 164 // Note that this cannot be done with assertions that 165 // relate to concreteness or abstractness. 
    BasicType elemt = ArrayKlass::cast(ctxk)->element_type();
    if (is_java_primitive(elemt)) return; // Ex: int[][]
    ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass();
    //if (ctxk->is_final()) return; // Ex: String[][]
  }
  check_ctxk(ctxk);
  assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk));
}

void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) {
  check_ctxk_abstract(ctxk);
  DepValue ctxk_dv(_oop_recorder, ctxk);
  DepValue conck_dv(_oop_recorder, conck, &ctxk_dv);
  assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv);
}

void Dependencies::assert_unique_implementor(InstanceKlass* ctxk, InstanceKlass* uniqk) {
  check_ctxk(ctxk);
  assert(ctxk->is_interface(), "not an interface");
  assert(ctxk->implementor() == uniqk, "not a unique implementor");
  assert_common_2(unique_implementor, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqk));
}

void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) {
  check_ctxk(ctxk);
  check_unique_method(ctxk, uniqm);
  assert_common_2(unique_concrete_method_2, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm));
}

void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) {
  assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle)));
}

#endif // INCLUDE_JVMCI


// Helper function.  If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader* ctxk1.  If there is one,
// the broader dependency already covers the new one (or the recorded
// context is widened to ctxk2 when ctxk2 is the broader class), so the
// caller does not need to append a separate entry.
bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
                                    int ctxk_i, ciKlass* ctxk2) {
  ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
  if (ctxk2->is_subtype_of(ctxk1)) {
    return true;  // success, and no need to change
  } else if (ctxk1->is_subtype_of(ctxk2)) {
    // new context class fully subsumes previous one
    deps->at_put(ctxk_i, ctxk2);
    return true;
  } else {
    return false;
  }
}

void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
  assert(dep_args(dept) == 1, "sanity");
  log_dependency(dept, x);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  if (note_dep_seen(dept, x)) {
    assert(deps->find(x) >= 0, "sanity");
  } else {
    deps->append(x);
  }
}

void Dependencies::assert_common_2(DepType dept,
                                   ciBaseObject* x0, ciBaseObject* x1) {
  assert(dep_args(dept) == 2, "sanity");
  log_dependency(dept, x0, x1);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  bool has_ctxk = has_explicit_context_arg(dept);
  if (has_ctxk) {
    assert(dep_context_arg(dept) == 0, "sanity");
    if (note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        ciBaseObject* y1 = deps->at(i+1);
        if (x1 == y1) {  // same subject; check the context
          if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
            return;
          }
        }
      }
    }
  } else {
    bool dep_seen_x0 = note_dep_seen(dept, x0); // records x0 for future queries
    bool dep_seen_x1 = note_dep_seen(dept, x1); // records x1 for future queries
    if (dep_seen_x0 && dep_seen_x1) {
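      // Each argument has been noted before, but possibly in different
      // assertions of this type, so the exact pair must still be searched
      // for before it can be treated as a duplicate.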
// look in this bucket for redundant assertions 259 const int stride = 2; 260 for (int i = deps->length(); (i -= stride) >= 0; ) { 261 ciBaseObject* y0 = deps->at(i+0); 262 ciBaseObject* y1 = deps->at(i+1); 263 if (x0 == y0 && x1 == y1) { 264 return; 265 } 266 } 267 } 268 } 269 270 // append the assertion in the correct bucket: 271 deps->append(x0); 272 deps->append(x1); 273 } 274 275 void Dependencies::assert_common_4(DepType dept, 276 ciKlass* ctxk, ciBaseObject* x1, ciBaseObject* x2, ciBaseObject* x3) { 277 assert(has_explicit_context_arg(dept), "sanity"); 278 assert(dep_context_arg(dept) == 0, "sanity"); 279 assert(dep_args(dept) == 4, "sanity"); 280 log_dependency(dept, ctxk, x1, x2, x3); 281 GrowableArray<ciBaseObject*>* deps = _deps[dept]; 282 283 // see if the same (or a similar) dep is already recorded 284 bool dep_seen_x1 = note_dep_seen(dept, x1); // records x1 for future queries 285 bool dep_seen_x2 = note_dep_seen(dept, x2); // records x2 for future queries 286 bool dep_seen_x3 = note_dep_seen(dept, x3); // records x3 for future queries 287 if (dep_seen_x1 && dep_seen_x2 && dep_seen_x3) { 288 // look in this bucket for redundant assertions 289 const int stride = 4; 290 for (int i = deps->length(); (i -= stride) >= 0; ) { 291 ciBaseObject* y1 = deps->at(i+1); 292 ciBaseObject* y2 = deps->at(i+2); 293 ciBaseObject* y3 = deps->at(i+3); 294 if (x1 == y1 && x2 == y2 && x3 == y3) { // same subjects; check the context 295 if (maybe_merge_ctxk(deps, i+0, ctxk)) { 296 return; 297 } 298 } 299 } 300 } 301 // append the assertion in the correct bucket: 302 deps->append(ctxk); 303 deps->append(x1); 304 deps->append(x2); 305 deps->append(x3); 306 } 307 308 #if INCLUDE_JVMCI 309 bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps, 310 int ctxk_i, DepValue ctxk2_dv) { 311 Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder); 312 Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder); 313 if (ctxk2->is_subtype_of(ctxk1)) { 314 return true; // success, and no need to change 315 } else if (ctxk1->is_subtype_of(ctxk2)) { 316 // new context class fully subsumes previous one 317 deps->at_put(ctxk_i, ctxk2_dv); 318 return true; 319 } else { 320 return false; 321 } 322 } 323 324 void Dependencies::assert_common_1(DepType dept, DepValue x) { 325 assert(dep_args(dept) == 1, "sanity"); 326 //log_dependency(dept, x); 327 GrowableArray<DepValue>* deps = _dep_values[dept]; 328 329 // see if the same (or a similar) dep is already recorded 330 if (note_dep_seen(dept, x)) { 331 assert(deps->find(x) >= 0, "sanity"); 332 } else { 333 deps->append(x); 334 } 335 } 336 337 void Dependencies::assert_common_2(DepType dept, 338 DepValue x0, DepValue x1) { 339 assert(dep_args(dept) == 2, "sanity"); 340 //log_dependency(dept, x0, x1); 341 GrowableArray<DepValue>* deps = _dep_values[dept]; 342 343 // see if the same (or a similar) dep is already recorded 344 bool has_ctxk = has_explicit_context_arg(dept); 345 if (has_ctxk) { 346 assert(dep_context_arg(dept) == 0, "sanity"); 347 if (note_dep_seen(dept, x1)) { 348 // look in this bucket for redundant assertions 349 const int stride = 2; 350 for (int i = deps->length(); (i -= stride) >= 0; ) { 351 DepValue y1 = deps->at(i+1); 352 if (x1 == y1) { // same subject; check the context 353 if (maybe_merge_ctxk(deps, i+0, x0)) { 354 return; 355 } 356 } 357 } 358 } 359 } else { 360 bool dep_seen_x0 = note_dep_seen(dept, x0); // records x0 for future queries 361 bool dep_seen_x1 = note_dep_seen(dept, x1); // records x1 for future queries 362 if (dep_seen_x0 && 
dep_seen_x1) { 363 // look in this bucket for redundant assertions 364 const int stride = 2; 365 for (int i = deps->length(); (i -= stride) >= 0; ) { 366 DepValue y0 = deps->at(i+0); 367 DepValue y1 = deps->at(i+1); 368 if (x0 == y0 && x1 == y1) { 369 return; 370 } 371 } 372 } 373 } 374 375 // append the assertion in the correct bucket: 376 deps->append(x0); 377 deps->append(x1); 378 } 379 #endif // INCLUDE_JVMCI 380 381 /// Support for encoding dependencies into an nmethod: 382 383 void Dependencies::copy_to(nmethod* nm) { 384 address beg = nm->dependencies_begin(); 385 address end = nm->dependencies_end(); 386 guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing"); 387 (void)memcpy(beg, content_bytes(), size_in_bytes()); 388 assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words"); 389 } 390 391 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) { 392 for (int i = 0; i < narg; i++) { 393 int diff = p1[i]->ident() - p2[i]->ident(); 394 if (diff != 0) return diff; 395 } 396 return 0; 397 } 398 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2) 399 { return sort_dep(p1, p2, 1); } 400 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2) 401 { return sort_dep(p1, p2, 2); } 402 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2) 403 { return sort_dep(p1, p2, 3); } 404 static int sort_dep_arg_4(ciBaseObject** p1, ciBaseObject** p2) 405 { return sort_dep(p1, p2, 4); } 406 407 #if INCLUDE_JVMCI 408 // metadata deps are sorted before object deps 409 static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) { 410 for (int i = 0; i < narg; i++) { 411 int diff = p1[i].sort_key() - p2[i].sort_key(); 412 if (diff != 0) return diff; 413 } 414 return 0; 415 } 416 static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2) 417 { return sort_dep_value(p1, p2, 1); } 418 static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2) 419 { return sort_dep_value(p1, p2, 2); } 420 static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2) 421 { return sort_dep_value(p1, p2, 3); } 422 #endif // INCLUDE_JVMCI 423 424 void Dependencies::sort_all_deps() { 425 #if INCLUDE_JVMCI 426 if (_using_dep_values) { 427 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { 428 DepType dept = (DepType)deptv; 429 GrowableArray<DepValue>* deps = _dep_values[dept]; 430 if (deps->length() <= 1) continue; 431 switch (dep_args(dept)) { 432 case 1: deps->sort(sort_dep_value_arg_1, 1); break; 433 case 2: deps->sort(sort_dep_value_arg_2, 2); break; 434 case 3: deps->sort(sort_dep_value_arg_3, 3); break; 435 default: ShouldNotReachHere(); break; 436 } 437 } 438 return; 439 } 440 #endif // INCLUDE_JVMCI 441 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { 442 DepType dept = (DepType)deptv; 443 GrowableArray<ciBaseObject*>* deps = _deps[dept]; 444 if (deps->length() <= 1) continue; 445 switch (dep_args(dept)) { 446 case 1: deps->sort(sort_dep_arg_1, 1); break; 447 case 2: deps->sort(sort_dep_arg_2, 2); break; 448 case 3: deps->sort(sort_dep_arg_3, 3); break; 449 case 4: deps->sort(sort_dep_arg_4, 4); break; 450 default: ShouldNotReachHere(); break; 451 } 452 } 453 } 454 455 size_t Dependencies::estimate_size_in_bytes() { 456 size_t est_size = 100; 457 #if INCLUDE_JVMCI 458 if (_using_dep_values) { 459 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { 460 DepType dept = (DepType)deptv; 461 
GrowableArray<DepValue>* deps = _dep_values[dept]; 462 est_size += deps->length() * 2; // tags and argument(s) 463 } 464 return est_size; 465 } 466 #endif // INCLUDE_JVMCI 467 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { 468 DepType dept = (DepType)deptv; 469 GrowableArray<ciBaseObject*>* deps = _deps[dept]; 470 est_size += deps->length()*2; // tags and argument(s) 471 } 472 return est_size; 473 } 474 475 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) { 476 switch (dept) { 477 case unique_concrete_method_2: 478 case unique_concrete_method_4: 479 return x->as_metadata()->as_method()->holder(); 480 default: 481 return nullptr; // let nullptr be nullptr 482 } 483 } 484 485 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) { 486 assert(must_be_in_vm(), "raw oops here"); 487 switch (dept) { 488 case unique_concrete_method_2: 489 case unique_concrete_method_4: 490 assert(x->is_method(), "sanity"); 491 return ((Method*)x)->method_holder(); 492 default: 493 return nullptr; // let nullptr be nullptr 494 } 495 } 496 497 void Dependencies::encode_content_bytes() { 498 sort_all_deps(); 499 500 // cast is safe, no deps can overflow INT_MAX 501 CompressedWriteStream bytes((int)estimate_size_in_bytes()); 502 503 #if INCLUDE_JVMCI 504 if (_using_dep_values) { 505 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { 506 DepType dept = (DepType)deptv; 507 GrowableArray<DepValue>* deps = _dep_values[dept]; 508 if (deps->length() == 0) continue; 509 int stride = dep_args(dept); 510 int ctxkj = dep_context_arg(dept); // -1 if no context arg 511 assert(stride > 0, "sanity"); 512 for (int i = 0; i < deps->length(); i += stride) { 513 jbyte code_byte = (jbyte)dept; 514 int skipj = -1; 515 if (ctxkj >= 0 && ctxkj+1 < stride) { 516 Klass* ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder); 517 DepValue x = deps->at(i+ctxkj+1); // following argument 518 if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) { 519 skipj = ctxkj; // we win: maybe one less oop to keep track of 520 code_byte |= default_context_type_bit; 521 } 522 } 523 bytes.write_byte(code_byte); 524 for (int j = 0; j < stride; j++) { 525 if (j == skipj) continue; 526 DepValue v = deps->at(i+j); 527 int idx = v.index(); 528 bytes.write_int(idx); 529 } 530 } 531 } 532 } else { 533 #endif // INCLUDE_JVMCI 534 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { 535 DepType dept = (DepType)deptv; 536 GrowableArray<ciBaseObject*>* deps = _deps[dept]; 537 if (deps->length() == 0) continue; 538 int stride = dep_args(dept); 539 int ctxkj = dep_context_arg(dept); // -1 if no context arg 540 assert(stride > 0, "sanity"); 541 for (int i = 0; i < deps->length(); i += stride) { 542 jbyte code_byte = (jbyte)dept; 543 int skipj = -1; 544 if (ctxkj >= 0 && ctxkj+1 < stride) { 545 ciKlass* ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass(); 546 ciBaseObject* x = deps->at(i+ctxkj+1); // following argument 547 if (ctxk == ctxk_encoded_as_null(dept, x)) { 548 skipj = ctxkj; // we win: maybe one less oop to keep track of 549 code_byte |= default_context_type_bit; 550 } 551 } 552 bytes.write_byte(code_byte); 553 for (int j = 0; j < stride; j++) { 554 if (j == skipj) continue; 555 ciBaseObject* v = deps->at(i+j); 556 int idx; 557 if (v->is_object()) { 558 idx = _oop_recorder->find_index(v->as_object()->constant_encoding()); 559 } else { 560 ciMetadata* meta = v->as_metadata(); 561 idx = _oop_recorder->find_index(meta->constant_encoding()); 562 } 563 
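        // Each remaining argument is written as its OopRecorder index; a
        // skipped context argument is not written at all and is recovered
        // on decode via ctxk_encoded_as_null() (see DepStream::argument()).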
bytes.write_int(idx); 564 } 565 } 566 } 567 #if INCLUDE_JVMCI 568 } 569 #endif 570 571 // write a sentinel byte to mark the end 572 bytes.write_byte(end_marker); 573 574 // round it out to a word boundary 575 while (bytes.position() % sizeof(HeapWord) != 0) { 576 bytes.write_byte(end_marker); 577 } 578 579 // check whether the dept byte encoding really works 580 assert((jbyte)default_context_type_bit != 0, "byte overflow"); 581 582 _content_bytes = bytes.buffer(); 583 _size_in_bytes = bytes.position(); 584 } 585 586 587 const char* Dependencies::_dep_name[TYPE_LIMIT] = { 588 "end_marker", 589 "evol_method", 590 "leaf_type", 591 "abstract_with_unique_concrete_subtype", 592 "unique_concrete_method_2", 593 "unique_concrete_method_4", 594 "unique_implementor", 595 "no_finalizable_subclasses", 596 "call_site_target_value" 597 }; 598 599 int Dependencies::_dep_args[TYPE_LIMIT] = { 600 -1,// end_marker 601 1, // evol_method m 602 1, // leaf_type ctxk 603 2, // abstract_with_unique_concrete_subtype ctxk, k 604 2, // unique_concrete_method_2 ctxk, m 605 4, // unique_concrete_method_4 ctxk, m, resolved_klass, resolved_method 606 2, // unique_implementor ctxk, implementor 607 1, // no_finalizable_subclasses ctxk 608 2 // call_site_target_value call_site, method_handle 609 }; 610 611 const char* Dependencies::dep_name(Dependencies::DepType dept) { 612 if (!dept_in_mask(dept, all_types)) return "?bad-dep?"; 613 return _dep_name[dept]; 614 } 615 616 int Dependencies::dep_args(Dependencies::DepType dept) { 617 if (!dept_in_mask(dept, all_types)) return -1; 618 return _dep_args[dept]; 619 } 620 621 void Dependencies::check_valid_dependency_type(DepType dept) { 622 guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d", (int) dept); 623 } 624 625 Dependencies::DepType Dependencies::validate_dependencies(CompileTask* task, char** failure_detail) { 626 int klass_violations = 0; 627 DepType result = end_marker; 628 for (Dependencies::DepStream deps(this); deps.next(); ) { 629 Klass* witness = deps.check_dependency(); 630 if (witness != nullptr) { 631 if (klass_violations == 0) { 632 result = deps.type(); 633 if (failure_detail != nullptr && klass_violations == 0) { 634 // Use a fixed size buffer to prevent the string stream from 635 // resizing in the context of an inner resource mark. 636 char* buffer = NEW_RESOURCE_ARRAY(char, O_BUFLEN); 637 stringStream st(buffer, O_BUFLEN); 638 deps.print_dependency(&st, witness, true); 639 *failure_detail = st.as_string(); 640 } 641 } 642 klass_violations++; 643 if (xtty == nullptr) { 644 // If we're not logging then a single violation is sufficient, 645 // otherwise we want to log all the dependences which were 646 // violated. 
        break;
      }
    }
  }

  return result;
}

// for the sake of the compiler log, print out current dependencies:
void Dependencies::log_all_dependencies() {
  if (log() == nullptr) return;
  ResourceMark rm;
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    int deplen = deps->length();
    if (deplen == 0) {
      continue;
    }
    int stride = dep_args(dept);
    GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
    for (int i = 0; i < deps->length(); i += stride) {
      for (int j = 0; j < stride; j++) {
        // flush out the identities before printing
        ciargs->push(deps->at(i+j));
      }
      write_dependency_to(log(), dept, ciargs);
      ciargs->clear();
    }
    guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
  }
}

void Dependencies::write_dependency_to(CompileLog* log,
                                       DepType dept,
                                       GrowableArray<DepArgument>* args,
                                       Klass* witness) {
  if (log == nullptr) {
    return;
  }
  ResourceMark rm;
  ciEnv* env = ciEnv::current();
  GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
  for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
    DepArgument arg = *it;
    if (arg.is_oop()) {
      ciargs->push(env->get_object(arg.oop_value()));
    } else {
      ciargs->push(env->get_metadata(arg.metadata_value()));
    }
  }
  int argslen = ciargs->length();
  Dependencies::write_dependency_to(log, dept, ciargs, witness);
  guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
}

void Dependencies::write_dependency_to(CompileLog* log,
                                       DepType dept,
                                       GrowableArray<ciBaseObject*>* args,
                                       Klass* witness) {
  if (log == nullptr) {
    return;
  }
  ResourceMark rm;
  GrowableArray<int>* argids = new GrowableArray<int>(args->length());
  for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
    ciBaseObject* obj = *it;
    if (obj->is_object()) {
      argids->push(log->identify(obj->as_object()));
    } else {
      argids->push(log->identify(obj->as_metadata()));
    }
  }
  if (witness != nullptr) {
    log->begin_elem("dependency_failed");
  } else {
    log->begin_elem("dependency");
  }
  log->print(" type='%s'", dep_name(dept));
  const int ctxkj = dep_context_arg(dept);  // -1 if no context arg
  if (ctxkj >= 0 && ctxkj < argids->length()) {
    log->print(" ctxk='%d'", argids->at(ctxkj));
  }
  // write remaining arguments, if any.
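  // Each id refers to a <klass/> or <method/> element previously emitted by
  // log->identify() above, so a line typically looks something like
  //   <dependency type='unique_concrete_method_2' ctxk='13' x='14'/>
  // (the ids shown are only illustrative).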
  for (int j = 0; j < argids->length(); j++) {
    if (j == ctxkj) continue; // already logged
    if (j == 1) {
      log->print( " x='%d'", argids->at(j));
    } else {
      log->print(" x%d='%d'", j, argids->at(j));
    }
  }
  if (witness != nullptr) {
    log->object("witness", witness);
    log->stamp();
  }
  log->end_elem();
}

void Dependencies::write_dependency_to(xmlStream* xtty,
                                       DepType dept,
                                       GrowableArray<DepArgument>* args,
                                       Klass* witness) {
  if (xtty == nullptr) {
    return;
  }
  Thread* thread = Thread::current();
  HandleMark rm(thread);
  ttyLocker ttyl;
  int ctxkj = dep_context_arg(dept);  // -1 if no context arg
  if (witness != nullptr) {
    xtty->begin_elem("dependency_failed");
  } else {
    xtty->begin_elem("dependency");
  }
  xtty->print(" type='%s'", dep_name(dept));
  if (ctxkj >= 0) {
    xtty->object("ctxk", args->at(ctxkj).metadata_value());
  }
  // write remaining arguments, if any.
  for (int j = 0; j < args->length(); j++) {
    if (j == ctxkj) continue; // already logged
    DepArgument arg = args->at(j);
    if (j == 1) {
      if (arg.is_oop()) {
        xtty->object("x", Handle(thread, arg.oop_value()));
      } else {
        xtty->object("x", arg.metadata_value());
      }
    } else {
      char xn[12];
      os::snprintf_checked(xn, sizeof(xn), "x%d", j);
      if (arg.is_oop()) {
        xtty->object(xn, Handle(thread, arg.oop_value()));
      } else {
        xtty->object(xn, arg.metadata_value());
      }
    }
  }
  if (witness != nullptr) {
    xtty->object("witness", witness);
    xtty->stamp();
  }
  xtty->end_elem();
}

void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
                                    Klass* witness, outputStream* st) {
  ResourceMark rm;
  ttyLocker ttyl;   // keep the following output all in one block
  st->print_cr("%s of type %s",
               (witness == nullptr)? "Dependency": "Failed dependency",
               dep_name(dept));
  // print arguments
  int ctxkj = dep_context_arg(dept);  // -1 if no context arg
  for (int j = 0; j < args->length(); j++) {
    DepArgument arg = args->at(j);
    bool put_star = false;
    if (arg.is_null()) continue;
    const char* what;
    if (j == ctxkj) {
      assert(arg.is_metadata(), "must be");
      what = "context";
      put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
    } else if (arg.is_method()) {
      what = "method ";
      put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), nullptr);
    } else if (arg.is_klass()) {
      what = "class ";
    } else {
      what = "object ";
    }
    st->print(" %s = %s", what, (put_star? "*": ""));
    if (arg.is_klass()) {
      st->print("%s", ((Klass*)arg.metadata_value())->external_name());
    } else if (arg.is_method()) {
      ((Method*)arg.metadata_value())->print_value_on(st);
    } else if (arg.is_oop()) {
      arg.oop_value()->print_value_on(st);
    } else {
      ShouldNotReachHere(); // Provide impl for this type.
    }

    st->cr();
  }
  if (witness != nullptr) {
    bool put_star = !Dependencies::is_concrete_klass(witness);
    st->print_cr(" witness = %s%s",
                 (put_star? "*": ""),
                 witness->external_name());
  }
}

void Dependencies::DepStream::log_dependency(Klass* witness) {
  if (_deps == nullptr && xtty == nullptr) return;  // fast cutout for runtime
  ResourceMark rm;
  const int nargs = argument_count();
  GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
  for (int j = 0; j < nargs; j++) {
    if (is_oop_argument(j)) {
      args->push(argument_oop(j));
    } else {
      args->push(argument(j));
    }
  }
  int argslen = args->length();
  if (_deps != nullptr && _deps->log() != nullptr) {
    if (ciEnv::current() != nullptr) {
      Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
    } else {
      // Treat the CompileLog as an xmlstream instead
      Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness);
    }
  } else {
    Dependencies::write_dependency_to(xtty, type(), args, witness);
  }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
}

void Dependencies::DepStream::print_dependency(outputStream* st, Klass* witness, bool verbose) {
  ResourceMark rm;
  int nargs = argument_count();
  GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
  for (int j = 0; j < nargs; j++) {
    if (is_oop_argument(j)) {
      args->push(argument_oop(j));
    } else {
      args->push(argument(j));
    }
  }
  int argslen = args->length();
  Dependencies::print_dependency(type(), args, witness, st);
  if (verbose) {
    if (_code != nullptr) {
      st->print(" code: ");
      _code->print_value_on(st);
      st->cr();
    }
  }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
}


/// Dependency stream support (decodes dependencies from an nmethod):

#ifdef ASSERT
void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
  assert(must_be_in_vm(), "raw oops here");
  _byte_limit = byte_limit;
  _type = undefined_dependency;  // defeat "already at end" assert
  assert((_code!=nullptr) + (_deps!=nullptr) == 1, "one or t'other");
}
#endif //ASSERT

bool Dependencies::DepStream::next() {
  assert(_type != end_marker, "already at end");
  if (_bytes.position() == 0 && _code != nullptr
      && _code->dependencies_size() == 0) {
    // Method has no dependencies at all.
    return false;
  }
  int code_byte = (_bytes.read_byte() & 0xFF);
  if (code_byte == end_marker) {
    DEBUG_ONLY(_type = end_marker);
    return false;
  } else {
    int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
    code_byte -= ctxk_bit;
    DepType dept = (DepType)code_byte;
    _type = dept;
    Dependencies::check_valid_dependency_type(dept);
    int stride = _dep_args[dept];
    assert(stride == dep_args(dept), "sanity");
    int skipj = -1;
    if (ctxk_bit != 0) {
      skipj = 0;  // currently the only context argument is at zero
      assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
    }
    for (int j = 0; j < stride; j++) {
      _xi[j] = (j == skipj)?
0: _bytes.read_int(); 927 } 928 DEBUG_ONLY(_xi[stride] = -1); // help detect overruns 929 return true; 930 } 931 } 932 933 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) { 934 Metadata* o = nullptr; 935 if (_code != nullptr) { 936 o = _code->metadata_at(i); 937 } else { 938 o = _deps->oop_recorder()->metadata_at(i); 939 } 940 return o; 941 } 942 943 inline oop Dependencies::DepStream::recorded_oop_at(int i) { 944 return (_code != nullptr) 945 ? _code->oop_at(i) 946 : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i)); 947 } 948 949 Metadata* Dependencies::DepStream::argument(int i) { 950 Metadata* result = recorded_metadata_at(argument_index(i)); 951 952 if (result == nullptr) { // Explicit context argument can be compressed 953 int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg 954 if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) { 955 result = ctxk_encoded_as_null(type(), argument(ctxkj+1)); 956 } 957 } 958 959 assert(result == nullptr || result->is_klass() || result->is_method(), "must be"); 960 return result; 961 } 962 963 /** 964 * Returns a unique identifier for each dependency argument. 965 */ 966 uintptr_t Dependencies::DepStream::get_identifier(int i) { 967 if (is_oop_argument(i)) { 968 return (uintptr_t)(oopDesc*)argument_oop(i); 969 } else { 970 return (uintptr_t)argument(i); 971 } 972 } 973 974 oop Dependencies::DepStream::argument_oop(int i) { 975 oop result = recorded_oop_at(argument_index(i)); 976 assert(oopDesc::is_oop_or_null(result), "must be"); 977 return result; 978 } 979 980 InstanceKlass* Dependencies::DepStream::context_type() { 981 assert(must_be_in_vm(), "raw oops here"); 982 983 // Most dependencies have an explicit context type argument. 984 { 985 int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg 986 if (ctxkj >= 0) { 987 Metadata* k = argument(ctxkj); 988 assert(k != nullptr && k->is_klass(), "type check"); 989 return InstanceKlass::cast((Klass*)k); 990 } 991 } 992 993 // Some dependencies are using the klass of the first object 994 // argument as implicit context type. 995 { 996 int ctxkj = dep_implicit_context_arg(type()); 997 if (ctxkj >= 0) { 998 Klass* k = argument_oop(ctxkj)->klass(); 999 assert(k != nullptr, "type check"); 1000 return InstanceKlass::cast(k); 1001 } 1002 } 1003 1004 // And some dependencies don't have a context type at all, 1005 // e.g. evol_method. 1006 return nullptr; 1007 } 1008 1009 // ----------------- DependencySignature -------------------------------------- 1010 bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) { 1011 if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) { 1012 return false; 1013 } 1014 1015 for (int i = 0; i < s1.args_count(); i++) { 1016 if (s1.arg(i) != s2.arg(i)) { 1017 return false; 1018 } 1019 } 1020 return true; 1021 } 1022 1023 /// Checking dependencies 1024 1025 // This hierarchy walker inspects subtypes of a given type, trying to find a "bad" class which breaks a dependency. 1026 // Such a class is called a "witness" to the broken dependency. 1027 // While searching around, we ignore "participants", which are already known to the dependency. 
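// A walker may also be told, via record_witnesses(n), to absorb up to n
// witnesses as extra participants instead of failing immediately; for example,
// find_unique_concrete_subtype() below records one witness so that a single
// concrete subtype is tolerated and only a second one ends the search.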
1028 class AbstractClassHierarchyWalker { 1029 public: 1030 enum { PARTICIPANT_LIMIT = 3 }; 1031 1032 private: 1033 // if non-zero, tells how many witnesses to convert to participants 1034 uint _record_witnesses; 1035 1036 // special classes which are not allowed to be witnesses: 1037 Klass* _participants[PARTICIPANT_LIMIT+1]; 1038 uint _num_participants; 1039 1040 #ifdef ASSERT 1041 uint _nof_requests; // one-shot walker 1042 #endif // ASSERT 1043 1044 static PerfCounter* _perf_find_witness_anywhere_calls_count; 1045 static PerfCounter* _perf_find_witness_anywhere_steps_count; 1046 static PerfCounter* _perf_find_witness_in_calls_count; 1047 1048 protected: 1049 virtual Klass* find_witness_in(KlassDepChange& changes) = 0; 1050 virtual Klass* find_witness_anywhere(InstanceKlass* context_type) = 0; 1051 1052 AbstractClassHierarchyWalker(Klass* participant) : _record_witnesses(0), _num_participants(0) 1053 #ifdef ASSERT 1054 , _nof_requests(0) 1055 #endif // ASSERT 1056 { 1057 for (uint i = 0; i < PARTICIPANT_LIMIT+1; i++) { 1058 _participants[i] = nullptr; 1059 } 1060 if (participant != nullptr) { 1061 add_participant(participant); 1062 } 1063 } 1064 1065 bool is_participant(Klass* k) { 1066 for (uint i = 0; i < _num_participants; i++) { 1067 if (_participants[i] == k) { 1068 return true; 1069 } 1070 } 1071 return false; 1072 } 1073 1074 bool record_witness(Klass* witness) { 1075 if (_record_witnesses > 0) { 1076 --_record_witnesses; 1077 add_participant(witness); 1078 return false; // not a witness 1079 } else { 1080 return true; // is a witness 1081 } 1082 } 1083 1084 class CountingClassHierarchyIterator : public ClassHierarchyIterator { 1085 private: 1086 jlong _nof_steps; 1087 public: 1088 CountingClassHierarchyIterator(InstanceKlass* root) : ClassHierarchyIterator(root), _nof_steps(0) {} 1089 1090 void next() { 1091 _nof_steps++; 1092 ClassHierarchyIterator::next(); 1093 } 1094 1095 ~CountingClassHierarchyIterator() { 1096 if (UsePerfData) { 1097 _perf_find_witness_anywhere_steps_count->inc(_nof_steps); 1098 } 1099 } 1100 }; 1101 1102 public: 1103 uint num_participants() { return _num_participants; } 1104 Klass* participant(uint n) { 1105 assert(n <= _num_participants, "oob"); 1106 if (n < _num_participants) { 1107 return _participants[n]; 1108 } else { 1109 return nullptr; 1110 } 1111 } 1112 1113 void add_participant(Klass* participant) { 1114 assert(!is_participant(participant), "sanity"); 1115 assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob"); 1116 uint np = _num_participants++; 1117 _participants[np] = participant; 1118 } 1119 1120 void record_witnesses(uint add) { 1121 if (add > PARTICIPANT_LIMIT) add = PARTICIPANT_LIMIT; 1122 assert(_num_participants + add < PARTICIPANT_LIMIT, "oob"); 1123 _record_witnesses = add; 1124 } 1125 1126 Klass* find_witness(InstanceKlass* context_type, KlassDepChange* changes = nullptr); 1127 1128 static void init(); 1129 static void print_statistics(); 1130 }; 1131 1132 PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_anywhere_calls_count = nullptr; 1133 PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_anywhere_steps_count = nullptr; 1134 PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_in_calls_count = nullptr; 1135 1136 void AbstractClassHierarchyWalker::init() { 1137 if (UsePerfData) { 1138 EXCEPTION_MARK; 1139 _perf_find_witness_anywhere_calls_count = 1140 PerfDataManager::create_counter(SUN_CI, "findWitnessAnywhere", PerfData::U_Events, CHECK); 1141 
_perf_find_witness_anywhere_steps_count = 1142 PerfDataManager::create_counter(SUN_CI, "findWitnessAnywhereSteps", PerfData::U_Events, CHECK); 1143 _perf_find_witness_in_calls_count = 1144 PerfDataManager::create_counter(SUN_CI, "findWitnessIn", PerfData::U_Events, CHECK); 1145 } 1146 } 1147 1148 Klass* AbstractClassHierarchyWalker::find_witness(InstanceKlass* context_type, KlassDepChange* changes) { 1149 // Current thread must be in VM (not native mode, as in CI): 1150 assert(must_be_in_vm(), "raw oops here"); 1151 // Must not move the class hierarchy during this check: 1152 assert_locked_or_safepoint(Compile_lock); 1153 assert(_nof_requests++ == 0, "repeated requests are not supported"); 1154 1155 assert(changes == nullptr || changes->involves_context(context_type), "irrelevant dependency"); 1156 1157 // (Note: Interfaces do not have subclasses.) 1158 // If it is an interface, search its direct implementors. 1159 // (Their subclasses are additional indirect implementors. See InstanceKlass::add_implementor().) 1160 if (context_type->is_interface()) { 1161 int nof_impls = context_type->nof_implementors(); 1162 if (nof_impls == 0) { 1163 return nullptr; // no implementors 1164 } else if (nof_impls == 1) { // unique implementor 1165 assert(context_type != context_type->implementor(), "not unique"); 1166 context_type = context_type->implementor(); 1167 } else { // nof_impls >= 2 1168 // Avoid this case: *I.m > { A.m, C }; B.m > C 1169 // Here, I.m has 2 concrete implementations, but m appears unique 1170 // as A.m, because the search misses B.m when checking C. 1171 // The inherited method B.m was getting missed by the walker 1172 // when interface 'I' was the starting point. 1173 // %%% Until this is fixed more systematically, bail out. 1174 return context_type; 1175 } 1176 } 1177 assert(!context_type->is_interface(), "no interfaces allowed"); 1178 1179 if (changes != nullptr) { 1180 if (UsePerfData) { 1181 _perf_find_witness_in_calls_count->inc(); 1182 } 1183 return find_witness_in(*changes); 1184 } else { 1185 if (UsePerfData) { 1186 _perf_find_witness_anywhere_calls_count->inc(); 1187 } 1188 return find_witness_anywhere(context_type); 1189 } 1190 } 1191 1192 class ConcreteSubtypeFinder : public AbstractClassHierarchyWalker { 1193 private: 1194 bool is_witness(Klass* k); 1195 1196 protected: 1197 virtual Klass* find_witness_in(KlassDepChange& changes); 1198 virtual Klass* find_witness_anywhere(InstanceKlass* context_type); 1199 1200 public: 1201 ConcreteSubtypeFinder(Klass* participant = nullptr) : AbstractClassHierarchyWalker(participant) {} 1202 }; 1203 1204 bool ConcreteSubtypeFinder::is_witness(Klass* k) { 1205 if (Dependencies::is_concrete_klass(k)) { 1206 return record_witness(k); // concrete subtype 1207 } else { 1208 return false; // not a concrete class 1209 } 1210 } 1211 1212 Klass* ConcreteSubtypeFinder::find_witness_in(KlassDepChange& changes) { 1213 // When looking for unexpected concrete types, do not look beneath expected ones: 1214 // * CX > CC > C' is OK, even if C' is new. 1215 // * CX > { CC, C' } is not OK if C' is new, and C' is the witness. 1216 Klass* new_type = changes.as_new_klass_change()->new_type(); 1217 assert(!is_participant(new_type), "only old classes are participants"); 1218 // If the new type is a subtype of a participant, we are done. 
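  // (A change that involves a participant's context means the new type was
  // loaded somewhere below that participant, so it cannot add a concrete
  // subtype that the participant does not already account for.)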
1219 for (uint i = 0; i < num_participants(); i++) { 1220 if (changes.involves_context(participant(i))) { 1221 // new guy is protected from this check by previous participant 1222 return nullptr; 1223 } 1224 } 1225 if (is_witness(new_type)) { 1226 return new_type; 1227 } 1228 // No witness found. The dependency remains unbroken. 1229 return nullptr; 1230 } 1231 1232 Klass* ConcreteSubtypeFinder::find_witness_anywhere(InstanceKlass* context_type) { 1233 for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) { 1234 Klass* sub = iter.klass(); 1235 // Do not report participant types. 1236 if (is_participant(sub)) { 1237 // Don't walk beneath a participant since it hides witnesses. 1238 iter.skip_subclasses(); 1239 } else if (is_witness(sub)) { 1240 return sub; // found a witness 1241 } 1242 } 1243 // No witness found. The dependency remains unbroken. 1244 return nullptr; 1245 } 1246 1247 class ConcreteMethodFinder : public AbstractClassHierarchyWalker { 1248 private: 1249 Symbol* _name; 1250 Symbol* _signature; 1251 1252 // cache of method lookups 1253 Method* _found_methods[PARTICIPANT_LIMIT+1]; 1254 1255 bool is_witness(Klass* k); 1256 1257 protected: 1258 virtual Klass* find_witness_in(KlassDepChange& changes); 1259 virtual Klass* find_witness_anywhere(InstanceKlass* context_type); 1260 1261 public: 1262 bool witnessed_reabstraction_in_supers(Klass* k); 1263 1264 ConcreteMethodFinder(Method* m, Klass* participant = nullptr) : AbstractClassHierarchyWalker(participant) { 1265 assert(m != nullptr && m->is_method(), "sanity"); 1266 _name = m->name(); 1267 _signature = m->signature(); 1268 1269 for (int i = 0; i < PARTICIPANT_LIMIT+1; i++) { 1270 _found_methods[i] = nullptr; 1271 } 1272 } 1273 1274 // Note: If n==num_participants, returns nullptr. 1275 Method* found_method(uint n) { 1276 assert(n <= num_participants(), "oob"); 1277 Method* fm = _found_methods[n]; 1278 assert(n == num_participants() || fm != nullptr, "proper usage"); 1279 if (fm != nullptr && fm->method_holder() != participant(n)) { 1280 // Default methods from interfaces can be added to classes. In 1281 // that case the holder of the method is not the class but the 1282 // interface where it's defined. 
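      // Such a method is not a concrete override declared by participant(n)
      // itself, so it is not reported as that participant's found method.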
      assert(fm->is_default_method(), "sanity");
      return nullptr;
    }
    return fm;
  }

  void add_participant(Klass* participant) {
    AbstractClassHierarchyWalker::add_participant(participant);
    _found_methods[num_participants()] = nullptr;
  }

  bool record_witness(Klass* witness, Method* m) {
    _found_methods[num_participants()] = m;
    return AbstractClassHierarchyWalker::record_witness(witness);
  }

 private:
  static PerfCounter* _perf_find_witness_anywhere_calls_count;
  static PerfCounter* _perf_find_witness_anywhere_steps_count;
  static PerfCounter* _perf_find_witness_in_calls_count;

 public:
  static void init();
  static void print_statistics();
};

bool ConcreteMethodFinder::is_witness(Klass* k) {
  if (is_participant(k)) {
    return false; // do not report participant types
  }
  if (k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);
    // Search class hierarchy first, skipping private implementations
    // as they never override any inherited methods
    Method* m = ik->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
    if (Dependencies::is_concrete_method(m, ik)) {
      return record_witness(k, m); // concrete method found
    } else {
      // Check for re-abstraction of method
      if (!ik->is_interface() && m != nullptr && m->is_abstract()) {
        // Found a matching abstract method 'm' in the class hierarchy.
        // This is fine iff 'k' is an abstract class and all concrete subtypes
        // of 'k' override 'm' and are participants of the current search.
        ConcreteSubtypeFinder wf;
        for (uint i = 0; i < num_participants(); i++) {
          Klass* p = participant(i);
          wf.add_participant(p);
        }
        Klass* w = wf.find_witness(ik);
        if (w != nullptr) {
          Method* wm = InstanceKlass::cast(w)->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
          if (!Dependencies::is_concrete_method(wm, w)) {
            // Found a concrete subtype 'w' which does not override abstract method 'm'.
            // Bail out because 'm' could be called with 'w' as receiver (leading to an
            // AbstractMethodError) and thus the method we are looking for is not unique.
            return record_witness(k, m);
          }
        }
      }
      // Check interface defaults also, if any exist.
      Array<Method*>* default_methods = ik->default_methods();
      if (default_methods != nullptr) {
        Method* dm = ik->find_method(default_methods, _name, _signature);
        if (Dependencies::is_concrete_method(dm, nullptr)) {
          return record_witness(k, dm); // default method found
        }
      }
      return false; // no concrete method found
    }
  } else {
    return false; // no methods to find in an array type
  }
}

Klass* ConcreteMethodFinder::find_witness_in(KlassDepChange& changes) {
  // When looking for unexpected concrete methods, look beneath expected ones, to see if there are overrides.
  // * CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
  Klass* new_type = changes.as_new_klass_change()->new_type();
  assert(!is_participant(new_type), "only old classes are participants");
  if (is_witness(new_type)) {
    return new_type;
  } else {
    // No witness found, but is_witness() doesn't detect method re-abstraction in case of spot-checking.
    if (witnessed_reabstraction_in_supers(new_type)) {
      return new_type;
    }
  }
  // No witness found. The dependency remains unbroken.
  return nullptr;
}

bool ConcreteMethodFinder::witnessed_reabstraction_in_supers(Klass* k) {
  if (!k->is_instance_klass()) {
    return false; // no methods to find in an array type
  } else {
    // Looking for a case when an abstract method is inherited into a concrete class.
    if (Dependencies::is_concrete_klass(k) && !k->is_interface()) {
      Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
      if (m != nullptr) {
        return false; // no reabstraction possible: local method found
      }
      for (InstanceKlass* super = k->java_super(); super != nullptr; super = super->java_super()) {
        m = super->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
        if (m != nullptr) { // inherited method found
          if (m->is_abstract() || m->is_overpass()) {
            return record_witness(super, m); // abstract method found
          }
          return false;
        }
      }
      // Miranda.
      return true;
    }
    return false;
  }
}


Klass* ConcreteMethodFinder::find_witness_anywhere(InstanceKlass* context_type) {
  // Walk hierarchy under a context type, looking for unexpected types.
  for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    if (is_witness(sub)) {
      return sub; // found a witness
    }
  }
  // No witness found. The dependency remains unbroken.
  return nullptr;
}

// For some method m and some class ctxk (subclass of method holder),
// enumerate all distinct overrides of m in concrete subclasses of ctxk.
// It relies on vtable/itable information to perform method selection on each linked subclass
// and ignores all not yet linked ones (speculatively treating them as "effectively abstract").
class LinkedConcreteMethodFinder : public AbstractClassHierarchyWalker {
 private:
  InstanceKlass* _resolved_klass;   // resolved class (JVMS-5.4.3.1)
  InstanceKlass* _declaring_klass;  // the holder of resolved method (JVMS-5.4.3.3)
  int _vtable_index;                // vtable/itable index of the resolved method
  bool _do_itable_lookup;           // choose between itable and vtable lookup logic

  // cache of method lookups
  Method* _found_methods[PARTICIPANT_LIMIT+1];

  bool is_witness(Klass* k);
  Method* select_method(InstanceKlass* recv_klass);
  static int compute_vtable_index(InstanceKlass* resolved_klass, Method* resolved_method, bool& is_itable_index);
  static bool is_concrete_klass(InstanceKlass* ik);

  void add_participant(Method* m, Klass* participant) {
    uint np = num_participants();
    AbstractClassHierarchyWalker::add_participant(participant);
    assert(np + 1 == num_participants(), "sanity");
    _found_methods[np] = m; // record the method for the participant
  }

  bool record_witness(Klass* witness, Method* m) {
    for (uint i = 0; i < num_participants(); i++) {
      if (found_method(i) == m) {
        return false; // already recorded
      }
    }
    // Record not yet seen method.
1446 _found_methods[num_participants()] = m; 1447 return AbstractClassHierarchyWalker::record_witness(witness); 1448 } 1449 1450 void initialize(Method* participant) { 1451 for (uint i = 0; i < PARTICIPANT_LIMIT+1; i++) { 1452 _found_methods[i] = nullptr; 1453 } 1454 if (participant != nullptr) { 1455 add_participant(participant, participant->method_holder()); 1456 } 1457 } 1458 1459 protected: 1460 virtual Klass* find_witness_in(KlassDepChange& changes); 1461 virtual Klass* find_witness_anywhere(InstanceKlass* context_type); 1462 1463 public: 1464 // In order to perform method selection, the following info is needed: 1465 // (1) interface or virtual call; 1466 // (2) vtable/itable index; 1467 // (3) declaring class (in case of interface call). 1468 // 1469 // It is prepared based on the results of method resolution: resolved class and resolved method (as specified in JVMS-5.4.3.3). 1470 // Optionally, a method which was previously determined as a unique target (uniqm) is added as a participant 1471 // to enable dependency spot-checking and speed up the search. 1472 LinkedConcreteMethodFinder(InstanceKlass* resolved_klass, Method* resolved_method, Method* uniqm = nullptr) : AbstractClassHierarchyWalker(nullptr) { 1473 assert(resolved_klass->is_linked(), "required"); 1474 assert(resolved_method->method_holder()->is_linked(), "required"); 1475 assert(!resolved_method->can_be_statically_bound(), "no vtable index available"); 1476 1477 _resolved_klass = resolved_klass; 1478 _declaring_klass = resolved_method->method_holder(); 1479 _vtable_index = compute_vtable_index(resolved_klass, resolved_method, 1480 _do_itable_lookup); // out parameter 1481 assert(_vtable_index >= 0, "invalid vtable index"); 1482 1483 initialize(uniqm); 1484 } 1485 1486 // Note: If n==num_participants, returns nullptr. 1487 Method* found_method(uint n) { 1488 assert(n <= num_participants(), "oob"); 1489 assert(participant(n) != nullptr || n == num_participants(), "proper usage"); 1490 return _found_methods[n]; 1491 } 1492 }; 1493 1494 Klass* LinkedConcreteMethodFinder::find_witness_in(KlassDepChange& changes) { 1495 Klass* type = changes.type(); 1496 1497 assert(!is_participant(type), "only old classes are participants"); 1498 1499 if (is_witness(type)) { 1500 return type; 1501 } 1502 return nullptr; // No witness found. The dependency remains unbroken. 1503 } 1504 1505 Klass* LinkedConcreteMethodFinder::find_witness_anywhere(InstanceKlass* context_type) { 1506 for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) { 1507 Klass* sub = iter.klass(); 1508 if (is_witness(sub)) { 1509 return sub; 1510 } 1511 if (sub->is_instance_klass() && !InstanceKlass::cast(sub)->is_linked()) { 1512 iter.skip_subclasses(); // ignore not yet linked classes 1513 } 1514 } 1515 return nullptr; // No witness found. The dependency remains unbroken. 
1516 } 1517 1518 bool LinkedConcreteMethodFinder::is_witness(Klass* k) { 1519 if (is_participant(k)) { 1520 return false; // do not report participant types 1521 } else if (k->is_instance_klass()) { 1522 InstanceKlass* ik = InstanceKlass::cast(k); 1523 if (is_concrete_klass(ik)) { 1524 Method* m = select_method(ik); 1525 return record_witness(ik, m); 1526 } else { 1527 return false; // ignore non-concrete holder class 1528 } 1529 } else { 1530 return false; // no methods to find in an array type 1531 } 1532 } 1533 1534 Method* LinkedConcreteMethodFinder::select_method(InstanceKlass* recv_klass) { 1535 Method* selected_method = nullptr; 1536 if (_do_itable_lookup) { 1537 assert(_declaring_klass->is_interface(), "sanity"); 1538 bool implements_interface; // initialized by method_at_itable_or_null() 1539 selected_method = recv_klass->method_at_itable_or_null(_declaring_klass, _vtable_index, 1540 implements_interface); // out parameter 1541 assert(implements_interface, "not implemented"); 1542 } else { 1543 selected_method = recv_klass->method_at_vtable(_vtable_index); 1544 } 1545 return selected_method; // nullptr when corresponding slot is empty (AbstractMethodError case) 1546 } 1547 1548 int LinkedConcreteMethodFinder::compute_vtable_index(InstanceKlass* resolved_klass, Method* resolved_method, 1549 // out parameter 1550 bool& is_itable_index) { 1551 if (resolved_klass->is_interface() && resolved_method->has_itable_index()) { 1552 is_itable_index = true; 1553 return resolved_method->itable_index(); 1554 } 1555 // Check for default or miranda method first. 1556 InstanceKlass* declaring_klass = resolved_method->method_holder(); 1557 if (!resolved_klass->is_interface() && declaring_klass->is_interface()) { 1558 is_itable_index = false; 1559 return resolved_klass->vtable_index_of_interface_method(resolved_method); 1560 } 1561 // At this point we are sure that resolved_method is virtual and not 1562 // a default or miranda method; therefore, it must have a valid vtable index. 1563 assert(resolved_method->has_vtable_index(), ""); 1564 is_itable_index = false; 1565 return resolved_method->vtable_index(); 1566 } 1567 1568 bool LinkedConcreteMethodFinder::is_concrete_klass(InstanceKlass* ik) { 1569 if (!Dependencies::is_concrete_klass(ik)) { 1570 return false; // not concrete 1571 } 1572 if (ik->is_interface()) { 1573 return false; // interfaces aren't concrete 1574 } 1575 if (!ik->is_linked()) { 1576 return false; // not yet linked classes don't have instances 1577 } 1578 return true; 1579 } 1580 1581 #ifdef ASSERT 1582 // Assert that m is inherited into ctxk, without intervening overrides. 1583 // (May return true even if this is not true, in corner cases where we punt.) 1584 bool Dependencies::verify_method_context(InstanceKlass* ctxk, Method* m) { 1585 if (m->is_private()) { 1586 return false; // Quick lose. Should not happen. 1587 } 1588 if (m->method_holder() == ctxk) { 1589 return true; // Quick win. 1590 } 1591 if (!(m->is_public() || m->is_protected())) { 1592 // The override story is complex when packages get involved. 1593 return true; // Must punt the assertion to true. 1594 } 1595 Method* lm = ctxk->lookup_method(m->name(), m->signature()); 1596 if (lm == nullptr) { 1597 // It might be an interface method 1598 lm = ctxk->lookup_method_in_ordered_interfaces(m->name(), m->signature()); 1599 } 1600 if (lm == m) { 1601 // Method m is inherited into ctxk. 

#ifdef ASSERT
// Assert that m is inherited into ctxk, without intervening overrides.
// (May return true even if this is not true, in corner cases where we punt.)
bool Dependencies::verify_method_context(InstanceKlass* ctxk, Method* m) {
  if (m->is_private()) {
    return false; // Quick lose. Should not happen.
  }
  if (m->method_holder() == ctxk) {
    return true;  // Quick win.
  }
  if (!(m->is_public() || m->is_protected())) {
    // The override story is complex when packages get involved.
    return true;  // Must punt the assertion to true.
  }
  Method* lm = ctxk->lookup_method(m->name(), m->signature());
  if (lm == nullptr) {
    // It might be an interface method
    lm = ctxk->lookup_method_in_ordered_interfaces(m->name(), m->signature());
  }
  if (lm == m) {
    // Method m is inherited into ctxk.
    return true;
  }
  if (lm != nullptr) {
    if (!(lm->is_public() || lm->is_protected())) {
      // Method is [package-]private, so the override story is complex.
      return true;  // Must punt the assertion to true.
    }
    if (lm->is_static()) {
      // Static methods don't override non-static ones, so punt.
      return true;
    }
    if (!Dependencies::is_concrete_method(lm, ctxk) &&
        !Dependencies::is_concrete_method(m, ctxk)) {
      // They are both non-concrete
      if (lm->method_holder()->is_subtype_of(m->method_holder())) {
        // Method m is overridden by lm, but both are non-concrete.
        return true;
      }
      if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() &&
          ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) {
        // Interface method defined in multiple super interfaces
        return true;
      }
    }
  }
  ResourceMark rm;
  tty->print_cr("Dependency method not found in the associated context:");
  tty->print_cr("  context = %s", ctxk->external_name());
  tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
  if (lm != nullptr) {
    tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
  }
  return false;
}
#endif // ASSERT

bool Dependencies::is_concrete_klass(Klass* k) {
  if (k->is_abstract()) return false;
  // %%% We could treat classes which are concrete but
  // have not yet been instantiated as virtually abstract.
  // This would require a deoptimization barrier on first instantiation.
  //if (k->is_not_instantiated()) return false;
  return true;
}

bool Dependencies::is_concrete_method(Method* m, Klass* k) {
  // nullptr is not a concrete method.
  if (m == nullptr) {
    return false;
  }
  // Statics are irrelevant to virtual call sites.
  if (m->is_static()) {
    return false;
  }
  // Abstract methods are not concrete.
  if (m->is_abstract()) {
    return false;
  }
  // Overpass (error) methods are not concrete if k is abstract.
  if (m->is_overpass() && k != nullptr) {
    return !k->is_abstract();
  }
  // Note: "true" is the conservative answer. If k == nullptr, the overpass clause
  // above is skipped, so we return true even when the answer would depend on it.
  return true;
}

Klass* Dependencies::find_finalizable_subclass(InstanceKlass* ik) {
  for (ClassHierarchyIterator iter(ik); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    if (sub->has_finalizer() && !sub->is_interface()) {
      return sub;
    }
  }
  return nullptr; // not found
}

bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
  if (k->is_abstract()) return false;
  // We could also return false if k does not yet appear to be
  // instantiated, if the VM version supports this distinction also.
  //if (k->is_not_instantiated()) return false;
  return true;
}

bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
  return k->has_finalizable_subclass();
}
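
// Usage sketch (hedged; the exact optimization is up to the compilers): the finalizer-related
// helpers above back the no_finalizable_subclasses dependency. When find_finalizable_subclass(ik)
// comes back empty, a JIT may compile allocations of ik without the runtime check that registers a
// new instance for finalization; if a finalizable subclass is loaded later,
// check_has_no_finalizable_subclasses() below reports it as a witness and the dependent code is
// invalidated.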

// Any use of the contents (bytecodes) of a method must be
// marked by an "evol_method" dependency, if those contents
// can change.  (Note: A method is always dependent on itself.)
Klass* Dependencies::check_evol_method(Method* m) {
  assert(must_be_in_vm(), "raw oops here");
  // Did somebody do a JVMTI RedefineClasses while our backs were turned?
  // Or is there now a breakpoint?
  // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
  if (m->is_old()
      || m->number_of_breakpoints() > 0
      || m->mismatch()) {
    return m->method_holder();
  } else {
    return nullptr;
  }
}

// This is a strong assertion:  It is that the given type
// has no subtypes whatever.  It is most useful for
// optimizing checks on reflected types or on array types.
// (Checks on types which are derived from real instances
// can be optimized more strongly than this, because we
// know that the checked type comes from a concrete type,
// and therefore we can disregard abstract types.)
Klass* Dependencies::check_leaf_type(InstanceKlass* ctxk) {
  assert(must_be_in_vm(), "raw oops here");
  assert_locked_or_safepoint(Compile_lock);
  Klass* sub = ctxk->subklass();
  if (sub != nullptr) {
    return sub;
  } else if (ctxk->nof_implementors() != 0) {
    // if it is an interface, it must be unimplemented
    // (if it is not an interface, nof_implementors is always zero)
    InstanceKlass* impl = ctxk->implementor();
    assert(impl != nullptr, "must be set");
    return impl;
  } else {
    return nullptr;
  }
}

// Test the assertion that conck is the only concrete subtype* of ctxk.
// The type conck itself is allowed to have further concrete subtypes.
// This allows the compiler to narrow occurrences of ctxk by conck,
// when dealing with the types of actual instances.
Klass* Dependencies::check_abstract_with_unique_concrete_subtype(InstanceKlass* ctxk,
                                                                 Klass* conck,
                                                                 NewKlassDepChange* changes) {
  ConcreteSubtypeFinder wf(conck);
  Klass* k = wf.find_witness(ctxk, changes);
  return k;
}


// Find the unique concrete proper subtype of ctxk, or nullptr if there
// is more than one concrete proper subtype.  If there are no concrete
// proper subtypes, return ctxk itself, whether it is concrete or not.
// The returned subtype is allowed to have further concrete subtypes.
// That is, return CC1 for CX > CC1 > CC2, but nullptr for CX > { CC1, CC2 }.
Klass* Dependencies::find_unique_concrete_subtype(InstanceKlass* ctxk) {
  ConcreteSubtypeFinder wf(ctxk);  // Ignore ctxk when walking.
  wf.record_witnesses(1);          // Record one other witness when walking.
  Klass* wit = wf.find_witness(ctxk);
  if (wit != nullptr)  return nullptr;   // Too many witnesses.
  Klass* conck = wf.participant(0);
  if (conck == nullptr) {
    return ctxk;                   // Return ctxk as a flag for "no subtypes".
  } else {
#ifndef PRODUCT
    // Make sure the dependency mechanism will pass this discovery:
    if (VerifyDependencies) {
      // Turn off dependency tracing while actually testing deps.
      FlagSetting fs(_verify_in_progress, true);
      if (!Dependencies::is_concrete_klass(ctxk)) {
        guarantee(nullptr == (void *)
                  check_abstract_with_unique_concrete_subtype(ctxk, conck),
                  "verify dep.");
      }
    }
#endif //PRODUCT
    return conck;
  }
}
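
// Illustrative example (hypothetical classes Shape and Circle): if an abstract class Shape
// currently has a single concrete subtype Circle, a compiler that sees a receiver statically typed
// as Shape may, under an abstract_with_unique_concrete_subtype(Shape, Circle) dependency, treat the
// receiver as a Circle (e.g. for devirtualization or type sharpening). Loading any other concrete
// subtype of Shape later produces a witness in check_abstract_with_unique_concrete_subtype() above
// and invalidates the dependent code.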

// Try to determine whether the root method in some context is concrete or not, based on the
// information about the unique method in that context.  It exploits the fact that a concrete root
// method is always inherited into the context when there is a unique method.
// Hence, the unique method's holder is always a supertype of the context class when the root
// method is concrete.
// Examples for concrete_root_method:
//      C (C.m uniqm)
//      |
//      CX (ctxk)                    uniqm is inherited into the context.
//
//      CX (ctxk) (CX.m uniqm)       here uniqm is defined in ctxk.
// Examples for !concrete_root_method:
//      CX (ctxk)
//      |
//      C (C.m uniqm)                uniqm is in a subtype of ctxk.
bool Dependencies::is_concrete_root_method(Method* uniqm, InstanceKlass* ctxk) {
  if (uniqm == nullptr) {
    return false; // match Dependencies::is_concrete_method() behavior
  }
  // Theoretically, the "direction" of the subtype check matters here.
  // On one hand, in the case of an interface context with a single implementor, uniqm can be in a
  // superclass of the implementor which is not related to the context class.
  // On the other hand, uniqm could come from an interface unrelated to the context class, but right
  // now that is not possible: it is required that uniqm->method_holder() is the participant
  // (uniqm->method_holder() <: ctxk), hence a default method can't be used as the unique method.
  if (ctxk->is_interface()) {
    InstanceKlass* implementor = ctxk->implementor();
    assert(implementor != ctxk, "single implementor only"); // should have been invalidated earlier
    ctxk = implementor;
  }
  InstanceKlass* holder = uniqm->method_holder();
  assert(!holder->is_interface(), "no default methods allowed");
  assert(ctxk->is_subclass_of(holder) || holder->is_subclass_of(ctxk), "not related");
  return ctxk->is_subclass_of(holder);
}

// If a class (or interface) has a unique concrete method uniqm, return nullptr.
// Otherwise, return a class that contains an interfering method.
Klass* Dependencies::check_unique_concrete_method(InstanceKlass* ctxk,
                                                  Method* uniqm,
                                                  NewKlassDepChange* changes) {
  ConcreteMethodFinder wf(uniqm, uniqm->method_holder());
  Klass* k = wf.find_witness(ctxk, changes);
  if (k != nullptr) {
    return k;
  }
  if (!Dependencies::is_concrete_root_method(uniqm, ctxk) || changes != nullptr) {
    Klass* conck = find_witness_AME(ctxk, uniqm, changes);
    if (conck != nullptr) {
      // Found a concrete subtype 'conck' which does not override the abstract root method.
      return conck;
    }
  }
  return nullptr;
}
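
// Illustrative example (hypothetical classes A and B): suppose ctxk = A declares A.m and A.m is
// currently the only concrete implementation seen from A (uniqm = A.m). If a new class B <: A that
// declares its own B.m is loaded, the ConcreteMethodFinder walk above reports B as a witness; the
// unique_concrete_method dependency is then broken and the nmethods that recorded it are
// invalidated.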

Klass* Dependencies::check_unique_implementor(InstanceKlass* ctxk, Klass* uniqk, NewKlassDepChange* changes) {
  assert(ctxk->is_interface(), "sanity");
  assert(ctxk->nof_implementors() > 0, "no implementors");
  if (ctxk->nof_implementors() == 1) {
    assert(ctxk->implementor() == uniqk, "sanity");
    return nullptr;
  }
  return ctxk; // no unique implementor
}

// Search for a possible AbstractMethodError (AME).
// There are two versions of this check:
//  1) Spot-checking version (class-load time): the newly added class is checked for AME,
//     i.e. whether an abstract/overpass method is inherited into (or declared in) the newly
//     added concrete class.
//  2) Compile-time analysis for an abstract/overpass root_m (abstract klass): the non-uniqm
//     subtrees are checked for concrete classes.
Klass* Dependencies::find_witness_AME(InstanceKlass* ctxk, Method* m, KlassDepChange* changes) {
  if (m != nullptr) {
    if (changes != nullptr) {
      // Spot-checking version.
      ConcreteMethodFinder wf(m);
      Klass* new_type = changes->as_new_klass_change()->new_type();
      if (wf.witnessed_reabstraction_in_supers(new_type)) {
        return new_type;
      }
    } else {
      // Note: It is required that uniqm->method_holder() is the participant (see ClassHierarchyWalker::found_method()).
      ConcreteSubtypeFinder wf(m->method_holder());
      Klass* conck = wf.find_witness(ctxk);
      if (conck != nullptr) {
        Method* cm = InstanceKlass::cast(conck)->find_instance_method(m->name(), m->signature(), Klass::PrivateLookupMode::skip);
        if (!Dependencies::is_concrete_method(cm, conck)) {
          return conck;
        }
      }
    }
  }
  return nullptr;
}

// This function is used by find_unique_concrete_method (the non-vtable-based version)
// to check whether a subtype method overrides the base method.
static bool overrides(Method* sub_m, Method* base_m) {
  assert(base_m != nullptr, "base method should be non null");
  if (sub_m == nullptr) {
    return false;
  }
  /**
   *  If base_m is public or protected, then sub_m always overrides.
   *  If base_m is neither public, protected, nor private (i.e. base_m is package-private),
   *  then sub_m must be in the same package as base_m.
   *  For a package-private base_m this is a conservative approach, as it allows only a subset
   *  of the cases permitted by the JVM specification.
   **/
  if (base_m->is_public() || base_m->is_protected() ||
      base_m->method_holder()->is_same_class_package(sub_m->method_holder())) {
    return true;
  }
  return false;
}
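
// Illustrative example (hypothetical classes): if p1.A declares a package-private method m() and
// p2.B extends p1.A with its own m(), then B.m does not override A.m under the JLS/JVMS access
// rules, because the two classes are in different packages. The check above accepts only the
// same-package case, so it can conservatively answer "no override" for some cross-package override
// chains that the specification would actually allow.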

// Find the set of all non-abstract methods under ctxk that match m.
// (The method m must be defined or inherited in ctxk.)
// Include m itself in the set, unless it is abstract.
// If this set has exactly one element, return that element.
Method* Dependencies::find_unique_concrete_method(InstanceKlass* ctxk, Method* m, Klass** participant) {
  // Return nullptr if m is marked old; must have been a redefined method.
  if (m->is_old()) {
    return nullptr;
  }
  if (m->is_default_method()) {
    return nullptr; // not supported
  }
  assert(verify_method_context(ctxk, m), "proper context");
  ConcreteMethodFinder wf(m);
  wf.record_witnesses(1);
  Klass* wit = wf.find_witness(ctxk);
  if (wit != nullptr)  return nullptr;  // Too many witnesses.
  Method* fm = wf.found_method(0);  // Will be nullptr if num_parts == 0.
  if (participant != nullptr) {
    (*participant) = wf.participant(0);
  }
  if (!Dependencies::is_concrete_method(fm, nullptr)) {
    fm = nullptr; // ignore abstract methods
  }
  if (Dependencies::is_concrete_method(m, ctxk)) {
    if (fm == nullptr) {
      // It turns out that m was always the only implementation.
      fm = m;
    } else if (fm != m) {
      // Two conflicting implementations after all.
      // (This can happen if m is inherited into ctxk and fm overrides it.)
      return nullptr;
    }
  } else if (Dependencies::find_witness_AME(ctxk, fm) != nullptr) {
    // Found a concrete subtype which does not override the abstract root method.
    return nullptr;
  } else if (!overrides(fm, m)) {
    // The found method doesn't override the abstract root method.
    return nullptr;
  }
  assert(Dependencies::is_concrete_root_method(fm, ctxk) == Dependencies::is_concrete_method(m, ctxk), "mismatch");
#ifndef PRODUCT
  // Make sure the dependency mechanism will pass this discovery:
  if (VerifyDependencies && fm != nullptr) {
    guarantee(nullptr == (void *)check_unique_concrete_method(ctxk, fm),
              "verify dep.");
  }
#endif //PRODUCT
  return fm;
}

// If a class (or interface) has a unique concrete method uniqm, return nullptr.
// Otherwise, return a class that contains an interfering method.
Klass* Dependencies::check_unique_concrete_method(InstanceKlass* ctxk,
                                                  Method* uniqm,
                                                  Klass* resolved_klass,
                                                  Method* resolved_method,
                                                  KlassDepChange* changes) {
  assert(!ctxk->is_interface() || ctxk == resolved_klass, "sanity");
  assert(!resolved_method->can_be_statically_bound() || resolved_method == uniqm, "sanity");
  assert(resolved_klass->is_subtype_of(resolved_method->method_holder()), "sanity");

  if (!InstanceKlass::cast(resolved_klass)->is_linked() ||
      !resolved_method->method_holder()->is_linked() ||
      resolved_method->can_be_statically_bound()) {
    // Dependency is redundant, but benign. Just keep it to avoid unnecessary recompilation.
    return nullptr; // no vtable index available
  }

  LinkedConcreteMethodFinder mf(InstanceKlass::cast(resolved_klass), resolved_method, uniqm);
  return mf.find_witness(ctxk, changes);
}
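
// Note on the two flavors of this check (summary, not normative): unique_concrete_method_2 records
// only (ctxk, uniqm) and is validated with a plain hierarchy walk (ConcreteMethodFinder above),
// while unique_concrete_method_4 additionally records the resolved klass/method of the call site,
// so its validation can redo vtable/itable selection via LinkedConcreteMethodFinder and thereby
// ignore not-yet-linked classes. The _4 flavor is also the one re-checked on class initialization
// changes (see check_klass_init_dependency below).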

// Find the set of all non-abstract methods under ctxk that match m.
// (The method m must be defined or inherited in ctxk.)
// Include m itself in the set, unless it is abstract.
// If this set has exactly one element, return that element.
// Not yet linked subclasses of ctxk are ignored since they don't have any instances yet.
// Additionally, resolved_klass and resolved_method complete the description of the call site being analyzed.
Method* Dependencies::find_unique_concrete_method(InstanceKlass* ctxk, Method* m, Klass* resolved_klass, Method* resolved_method) {
  // Return nullptr if m is marked old; must have been a redefined method.
  if (m->is_old()) {
    return nullptr;
  }
  if (!InstanceKlass::cast(resolved_klass)->is_linked() ||
      !resolved_method->method_holder()->is_linked() ||
      resolved_method->can_be_statically_bound()) {
    return m; // nothing to do: no witness under ctxk
  }
  LinkedConcreteMethodFinder wf(InstanceKlass::cast(resolved_klass), resolved_method);
  assert(Dependencies::verify_method_context(ctxk, m), "proper context");
  wf.record_witnesses(1);
  Klass* wit = wf.find_witness(ctxk);
  if (wit != nullptr) {
    return nullptr; // Too many witnesses.
  }
  // p == nullptr when no participants are found (wf.num_participants() == 0).
  // The fm == nullptr case has two meanings:
  //   * when p == nullptr: no method found;
  //   * when p != nullptr: an AbstractMethodError-throwing method was found.
  // Also, a found method should always be accompanied by a participant class.
  Klass*  p  = wf.participant(0);
  Method* fm = wf.found_method(0);
  assert(fm == nullptr || p != nullptr, "no participant");
  // Normalize all error-throwing cases to nullptr.
  if (fm == Universe::throw_illegal_access_error() ||
      fm == Universe::throw_no_such_method_error() ||
      !Dependencies::is_concrete_method(fm, p)) {
    fm = nullptr; // error-throwing method
  }
  if (Dependencies::is_concrete_method(m, ctxk)) {
    if (p == nullptr) {
      // It turns out that m was always the only implementation.
      assert(fm == nullptr, "sanity");
      fm = m;
    }
  }
#ifndef PRODUCT
  // Make sure the dependency mechanism will pass this discovery:
  if (VerifyDependencies && fm != nullptr) {
    guarantee(nullptr == check_unique_concrete_method(ctxk, fm, resolved_klass, resolved_method),
              "verify dep.");
  }
#endif // PRODUCT
  assert(fm == nullptr || !fm->is_abstract(), "sanity");
  // Old CHA conservatively reports concrete methods in abstract classes
  // irrespective of whether they have concrete subclasses or not.
  // Also, the abstract root method case is not fully supported.
#ifdef ASSERT
  Klass*  uniqp = nullptr;
  Method* uniqm = Dependencies::find_unique_concrete_method(ctxk, m, &uniqp);
  assert(uniqm == nullptr || uniqm == fm ||
         m->is_abstract() ||
         uniqm->method_holder()->is_abstract() ||
         (fm == nullptr && uniqm != nullptr && uniqp != nullptr && !InstanceKlass::cast(uniqp)->is_linked()),
         "sanity");
#endif // ASSERT
  return fm;
}

Klass* Dependencies::check_has_no_finalizable_subclasses(InstanceKlass* ctxk, NewKlassDepChange* changes) {
  InstanceKlass* search_at = ctxk;
  if (changes != nullptr) {
    search_at = changes->new_type(); // just look at the new bit
  }
  return find_finalizable_subclass(search_at);
}

Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
  assert(call_site != nullptr, "sanity");
  assert(method_handle != nullptr, "sanity");
  assert(call_site->is_a(vmClasses::CallSite_klass()), "sanity");

  if (changes == nullptr) {
    // Validate all CallSites
    if (java_lang_invoke_CallSite::target(call_site) != method_handle)
      return call_site->klass();  // assertion failed
  } else {
    // Validate the given CallSite
    if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
      assert(method_handle != changes->method_handle(), "must be");
      return call_site->klass();  // assertion failed
    }
  }
  return nullptr;  // assertion still valid
}
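
// Background sketch (hedged): a call_site_target_value dependency records the MethodHandle that a
// java.lang.invoke.CallSite was bound to when the dependent nmethod was compiled, which lets a JIT
// inline through the current target of an invokedynamic site. If user code later installs a
// different handle (e.g. via CallSite.setTarget()), the check above returns the CallSite's klass as
// a witness and the dependent code is invalidated so that the new target is observed.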

void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
  if (_verify_in_progress)  return;  // don't log
  if (witness != nullptr) {
    LogTarget(Debug, dependencies) lt;
    if (lt.is_enabled()) {
      LogStream ls(&lt);
      print_dependency(&ls, witness, /*verbose=*/ true);
    }
    // The following is a no-op unless logging is enabled:
    log_dependency(witness);
  }
}

Klass* Dependencies::DepStream::check_new_klass_dependency(NewKlassDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  Klass* witness = nullptr;
  switch (type()) {
  case evol_method:
    witness = check_evol_method(method_argument(0));
    break;
  case leaf_type:
    witness = check_leaf_type(context_type());
    break;
  case abstract_with_unique_concrete_subtype:
    witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
    break;
  case unique_concrete_method_2:
    witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
    break;
  case unique_concrete_method_4:
    witness = check_unique_concrete_method(context_type(), method_argument(1), type_argument(2), method_argument(3), changes);
    break;
  case unique_implementor:
    witness = check_unique_implementor(context_type(), type_argument(1), changes);
    break;
  case no_finalizable_subclasses:
    witness = check_has_no_finalizable_subclasses(context_type(), changes);
    break;
  default:
    witness = nullptr;
    break;
  }
  trace_and_log_witness(witness);
  return witness;
}

Klass* Dependencies::DepStream::check_klass_init_dependency(KlassInitDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  // No new types added. Only unique_concrete_method_4 is sensitive to class initialization changes.
  Klass* witness = nullptr;
  switch (type()) {
  case unique_concrete_method_4:
    witness = check_unique_concrete_method(context_type(), method_argument(1), type_argument(2), method_argument(3), changes);
    break;
  default:
    witness = nullptr;
    break;
  }
  trace_and_log_witness(witness);
  return witness;
}
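
// Dispatch note: a KlassDepChange is either a class-hierarchy change (a new type was added) or a
// class-initialization change. The function below routes the former through
// check_new_klass_dependency(), which re-checks every dependency type, and the latter through
// check_klass_init_dependency(), which only re-checks unique_concrete_method_4 dependencies.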
Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  if (changes != nullptr) {
    if (changes->is_klass_init_change()) {
      return check_klass_init_dependency(changes->as_klass_init_change());
    } else {
      return check_new_klass_dependency(changes->as_new_klass_change());
    }
  } else {
    Klass* witness = check_new_klass_dependency(nullptr);
    // check_klass_init_dependency duplicates check_new_klass_dependency checks when class hierarchy change info is absent.
    assert(witness != nullptr || check_klass_init_dependency(nullptr) == nullptr, "missed dependency");
    return witness;
  }
}

Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
  assert_locked_or_safepoint(Compile_lock);
  Dependencies::check_valid_dependency_type(type());

  Klass* witness = nullptr;
  switch (type()) {
  case call_site_target_value:
    witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
    break;
  default:
    witness = nullptr;
    break;
  }
  trace_and_log_witness(witness);
  return witness;
}


Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
  // Handle klass dependency
  if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
    return check_klass_dependency(changes.as_klass_change());

  // Handle CallSite dependency
  if (changes.is_call_site_change())
    return check_call_site_dependency(changes.as_call_site_change());

  // irrelevant dependency; skip it
  return nullptr;
}


void DepChange::print() { print_on(tty); }

void DepChange::print_on(outputStream* st) {
  int nsup = 0, nint = 0;
  for (ContextStream str(*this); str.next(); ) {
    InstanceKlass* k = str.klass();
    switch (str.change_type()) {
    case Change_new_type:
      st->print_cr("  dependee = %s", k->external_name());
      break;
    case Change_new_sub:
      if (!WizardMode) {
        ++nsup;
      } else {
        st->print_cr("  context super = %s", k->external_name());
      }
      break;
    case Change_new_impl:
      if (!WizardMode) {
        ++nint;
      } else {
        st->print_cr("  context interface = %s", k->external_name());
      }
      break;
    default:
      break;
    }
  }
  if (nsup + nint != 0) {
    st->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
  }
}

void DepChange::ContextStream::start() {
  InstanceKlass* type = (_changes.is_klass_change() ? _changes.as_klass_change()->type() : (InstanceKlass*) nullptr);
  _change_type = (type == nullptr ? NO_CHANGE : Start_Klass);
  _klass = type;
  _ti_base = nullptr;
  _ti_index = 0;
  _ti_limit = 0;
}
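
// Iteration order of the state machine below (descriptive note): first the newly added type itself
// (Change_new_type), then each of its Java superclasses in turn (Change_new_sub), and finally its
// transitive interfaces (Change_new_impl). KlassDepChange relies on this order to mark every
// context type that the new class could affect.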
bool DepChange::ContextStream::next() {
  switch (_change_type) {
  case Start_Klass:             // initial state; _klass is the new type
    _ti_base = _klass->transitive_interfaces();
    _ti_index = 0;
    _change_type = Change_new_type;
    return true;
  case Change_new_type:
    // fall through:
    _change_type = Change_new_sub;
  case Change_new_sub:
    // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
    {
      _klass = _klass->java_super();
      if (_klass != nullptr) {
        return true;
      }
    }
    // else set up _ti_limit and fall through:
    _ti_limit = (_ti_base == nullptr) ? 0 : _ti_base->length();
    _change_type = Change_new_impl;
  case Change_new_impl:
    if (_ti_index < _ti_limit) {
      _klass = _ti_base->at(_ti_index++);
      return true;
    }
    // fall through:
    _change_type = NO_CHANGE;   // iterator is exhausted
  case NO_CHANGE:
    break;
  default:
    ShouldNotReachHere();
  }
  return false;
}

void KlassDepChange::initialize() {
  // entire transaction must be under this lock:
  assert_lock_strong(Compile_lock);

  // Mark the dependee and all its superclasses.
  // Mark transitive interfaces as well.
  for (ContextStream str(*this); str.next(); ) {
    InstanceKlass* d = str.klass();
    assert(!d->is_marked_dependent(), "checking");
    d->set_is_marked_dependent(true);
  }
}

KlassDepChange::~KlassDepChange() {
  // Unmark the dependee and all its superclasses.
  // Unmark transitive interfaces as well.
  for (ContextStream str(*this); str.next(); ) {
    InstanceKlass* d = str.klass();
    d->set_is_marked_dependent(false);
  }
}

bool KlassDepChange::involves_context(Klass* k) {
  if (k == nullptr || !k->is_instance_klass()) {
    return false;
  }
  InstanceKlass* ik = InstanceKlass::cast(k);
  bool is_contained = ik->is_marked_dependent();
  assert(is_contained == type()->is_subtype_of(k),
         "correct marking of potential context types");
  return is_contained;
}

void Dependencies::print_statistics() {
  AbstractClassHierarchyWalker::print_statistics();
}

void AbstractClassHierarchyWalker::print_statistics() {
  if (UsePerfData) {
    jlong deps_find_witness_calls   = _perf_find_witness_anywhere_calls_count->get_value();
    jlong deps_find_witness_steps   = _perf_find_witness_anywhere_steps_count->get_value();
    jlong deps_find_witness_singles = _perf_find_witness_in_calls_count->get_value();

    ttyLocker ttyl;
    tty->print_cr("Dependency check (find_witness) "
                  "calls=" JLONG_FORMAT ", steps=" JLONG_FORMAT " (avg=%.1f), singles=" JLONG_FORMAT,
                  deps_find_witness_calls,
                  deps_find_witness_steps,
                  (double)deps_find_witness_steps / deps_find_witness_calls,
                  deps_find_witness_singles);
    if (xtty != nullptr) {
      xtty->elem("deps_find_witness calls='" JLONG_FORMAT "' steps='" JLONG_FORMAT "' singles='" JLONG_FORMAT "'",
                 deps_find_witness_calls,
                 deps_find_witness_steps,
                 deps_find_witness_singles);
    }
  }
}

CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) :
  _call_site(call_site),
  _method_handle(method_handle) {
  assert(_call_site()->is_a(vmClasses::CallSite_klass()), "must be");
  assert(_method_handle.is_null() || _method_handle()->is_a(vmClasses::MethodHandle_klass()), "must be");
}

void dependencies_init() {
  AbstractClassHierarchyWalker::init();
}