1 /*
2 * Copyright (c) 2005, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "ci/ciArrayKlass.hpp"
26 #include "ci/ciEnv.hpp"
27 #include "ci/ciKlass.hpp"
28 #include "ci/ciMethod.hpp"
29 #include "classfile/javaClasses.inline.hpp"
30 #include "classfile/vmClasses.hpp"
31 #include "code/dependencies.hpp"
32 #include "compiler/compileBroker.hpp"
33 #include "compiler/compileLog.hpp"
34 #include "compiler/compileTask.hpp"
35 #include "memory/resourceArea.hpp"
36 #include "oops/klass.hpp"
37 #include "oops/method.inline.hpp"
38 #include "oops/objArrayKlass.hpp"
39 #include "oops/oop.inline.hpp"
40 #include "runtime/flags/flagSetting.hpp"
41 #include "runtime/handles.inline.hpp"
42 #include "runtime/javaThread.inline.hpp"
43 #include "runtime/jniHandles.inline.hpp"
44 #include "runtime/mutexLocker.hpp"
45 #include "runtime/perfData.hpp"
46 #include "runtime/vmThread.hpp"
47 #include "utilities/copy.hpp"
48
49
50 #ifdef ASSERT
51 static bool must_be_in_vm() {
52 Thread* thread = Thread::current();
53 if (thread->is_Java_thread()) {
54 return JavaThread::cast(thread)->thread_state() == _thread_in_vm;
55 } else {
56 return true; // Could be VMThread or GC thread
57 }
58 }
59 #endif //ASSERT
60
61 bool Dependencies::_verify_in_progress = false; // when set, suppress -Xlog:dependencies output
62
63 void Dependencies::initialize(ciEnv* env) {
64 Arena* arena = env->arena();
65 _oop_recorder = env->oop_recorder();
66 _log = env->log();
67 _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
68 #if INCLUDE_JVMCI
69 _using_dep_values = false;
70 #endif
71 DEBUG_ONLY(_deps[end_marker] = nullptr);
72 for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
73 _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, nullptr);
74 }
75 _content_bytes = nullptr;
76 _size_in_bytes = (size_t)-1;
77
78 assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
79 }
80
81 void Dependencies::assert_evol_method(ciMethod* m) {
82 assert_common_1(evol_method, m);
83 }
84
85 void Dependencies::assert_leaf_type(ciKlass* ctxk) {
86 if (ctxk->is_array_klass()) {
87 // As a special case, support this assertion on an array type,
88 // which reduces to an assertion on its element type.
89 // Note that this cannot be done with assertions that
90 // relate to concreteness or abstractness.
91 ciType* elemt = ctxk->as_array_klass()->base_element_type();
92 if (!elemt->is_instance_klass()) return; // Ex: int[][]
93 ctxk = elemt->as_instance_klass();
94 //if (ctxk->is_final()) return; // Ex: String[][]
95 }
96 check_ctxk(ctxk);
97 assert_common_1(leaf_type, ctxk);
98 }
99
100 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
101 check_ctxk_abstract(ctxk);
102 assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
103 }
104
105 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
106 check_ctxk(ctxk);
107 check_unique_method(ctxk, uniqm);
108 assert_common_2(unique_concrete_method_2, ctxk, uniqm);
109 }
110
111 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm, ciKlass* resolved_klass, ciMethod* resolved_method) {
112 check_ctxk(ctxk);
113 check_unique_method(ctxk, uniqm);
114 assert_common_4(unique_concrete_method_4, ctxk, uniqm, resolved_klass, resolved_method);
115 }
116
117 void Dependencies::assert_unique_implementor(ciInstanceKlass* ctxk, ciInstanceKlass* uniqk) {
118 check_ctxk(ctxk);
119 check_unique_implementor(ctxk, uniqk);
120 assert_common_2(unique_implementor, ctxk, uniqk);
121 }
122
123 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
124 check_ctxk(ctxk);
125 assert_common_1(no_finalizable_subclasses, ctxk);
126 }
127
128 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
129 assert_common_2(call_site_target_value, call_site, method_handle);
130 }
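// Usage sketch (hypothetical caller, for illustration only): after a compiler
// devirtualizes a call based on class-hierarchy analysis, it records the
// assumption so the resulting nmethod can be invalidated if loading a new
// class later breaks it. Here deps() stands for whatever accessor yields the
// active Dependencies object; the names below are placeholders, not APIs:
//
//   ciKlass*  ctxk   = ...;   // context (receiver) klass
//   ciMethod* target = ...;   // unique concrete target found by the analysis
//   deps()->assert_unique_concrete_method(ctxk, target);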
131
132 #if INCLUDE_JVMCI
133
134 Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) {
135 _oop_recorder = oop_recorder;
136 _log = log;
137 _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
138 _using_dep_values = true;
139 DEBUG_ONLY(_dep_values[end_marker] = nullptr);
140 for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
141 _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue());
142 }
143 _content_bytes = nullptr;
144 _size_in_bytes = (size_t)-1;
145
146 assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
147 }
148
149 void Dependencies::assert_evol_method(Method* m) {
150 assert_common_1(evol_method, DepValue(_oop_recorder, m));
151 }
152
153 void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) {
154 check_ctxk(ctxk);
155 assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk));
156 }
157
158 void Dependencies::assert_leaf_type(Klass* ctxk) {
159 if (ctxk->is_array_klass()) {
160 // As a special case, support this assertion on an array type,
161 // which reduces to an assertion on its element type.
162 // Note that this cannot be done with assertions that
163 // relate to concreteness or abstractness.
164 BasicType elemt = ArrayKlass::cast(ctxk)->element_type();
165 if (is_java_primitive(elemt)) return; // Ex: int[][]
166 ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass();
167 //if (ctxk->is_final()) return; // Ex: String[][]
168 }
169 check_ctxk(ctxk);
170 assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk));
171 }
172
173 void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) {
174 check_ctxk_abstract(ctxk);
175 DepValue ctxk_dv(_oop_recorder, ctxk);
176 DepValue conck_dv(_oop_recorder, conck, &ctxk_dv);
177 assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv);
178 }
179
180 void Dependencies::assert_unique_implementor(InstanceKlass* ctxk, InstanceKlass* uniqk) {
181 check_ctxk(ctxk);
182 assert(ctxk->is_interface(), "not an interface");
183 assert(ctxk->implementor() == uniqk, "not a unique implementor");
184 assert_common_2(unique_implementor, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqk));
185 }
186
187 void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) {
188 check_ctxk(ctxk);
189 check_unique_method(ctxk, uniqm);
190 assert_common_2(unique_concrete_method_2, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm));
191 }
192
193 void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) {
194 assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle)));
195 }
196
197 #endif // INCLUDE_JVMCI
198
199
200 // Helper function. If we are adding a new dep. under ctxk2, try to find an
201 // old dep. under a broader ctxk1 (a supertype of ctxk2); if there is one, the
202 // new dep. is redundant. If the old context is narrower, widen it to ctxk2.
203 bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
204 int ctxk_i, ciKlass* ctxk2) {
205 ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
206 if (ctxk2->is_subtype_of(ctxk1)) {
207 return true; // success, and no need to change
208 } else if (ctxk1->is_subtype_of(ctxk2)) {
209 // new context class fully subsumes previous one
210 deps->at_put(ctxk_i, ctxk2);
211 return true;
212 } else {
213 return false;
214 }
215 }
216
217 void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
218 assert(dep_args(dept) == 1, "sanity");
219 log_dependency(dept, x);
220 GrowableArray<ciBaseObject*>* deps = _deps[dept];
221
222 // see if the same (or a similar) dep is already recorded
223 if (note_dep_seen(dept, x)) {
224 assert(deps->find(x) >= 0, "sanity");
225 } else {
226 deps->append(x);
227 }
228 }
229
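// Two-argument dependencies are stored as consecutive (x0, x1) pairs in the
// per-type bucket, so the bucket is scanned with a stride of 2. For types with
// an explicit context argument, a repeated subject (x1) lets the contexts be
// merged via maybe_merge_ctxk(); otherwise only exact duplicates are elided.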
230 void Dependencies::assert_common_2(DepType dept,
231 ciBaseObject* x0, ciBaseObject* x1) {
232 assert(dep_args(dept) == 2, "sanity");
233 log_dependency(dept, x0, x1);
234 GrowableArray<ciBaseObject*>* deps = _deps[dept];
235
236 // see if the same (or a similar) dep is already recorded
237 bool has_ctxk = has_explicit_context_arg(dept);
238 if (has_ctxk) {
239 assert(dep_context_arg(dept) == 0, "sanity");
240 if (note_dep_seen(dept, x1)) {
241 // look in this bucket for redundant assertions
242 const int stride = 2;
243 for (int i = deps->length(); (i -= stride) >= 0; ) {
244 ciBaseObject* y1 = deps->at(i+1);
245 if (x1 == y1) { // same subject; check the context
246 if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
247 return;
248 }
249 }
250 }
251 }
252 } else {
253 bool dep_seen_x0 = note_dep_seen(dept, x0); // records x0 for future queries
254 bool dep_seen_x1 = note_dep_seen(dept, x1); // records x1 for future queries
255 if (dep_seen_x0 && dep_seen_x1) {
256 // look in this bucket for redundant assertions
257 const int stride = 2;
258 for (int i = deps->length(); (i -= stride) >= 0; ) {
259 ciBaseObject* y0 = deps->at(i+0);
260 ciBaseObject* y1 = deps->at(i+1);
261 if (x0 == y0 && x1 == y1) {
262 return;
263 }
264 }
265 }
266 }
267
268 // append the assertion in the correct bucket:
269 deps->append(x0);
270 deps->append(x1);
271 }
272
273 void Dependencies::assert_common_4(DepType dept,
274 ciKlass* ctxk, ciBaseObject* x1, ciBaseObject* x2, ciBaseObject* x3) {
275 assert(has_explicit_context_arg(dept), "sanity");
276 assert(dep_context_arg(dept) == 0, "sanity");
277 assert(dep_args(dept) == 4, "sanity");
278 log_dependency(dept, ctxk, x1, x2, x3);
279 GrowableArray<ciBaseObject*>* deps = _deps[dept];
280
281 // see if the same (or a similar) dep is already recorded
282 bool dep_seen_x1 = note_dep_seen(dept, x1); // records x1 for future queries
283 bool dep_seen_x2 = note_dep_seen(dept, x2); // records x2 for future queries
284 bool dep_seen_x3 = note_dep_seen(dept, x3); // records x3 for future queries
285 if (dep_seen_x1 && dep_seen_x2 && dep_seen_x3) {
286 // look in this bucket for redundant assertions
287 const int stride = 4;
288 for (int i = deps->length(); (i -= stride) >= 0; ) {
289 ciBaseObject* y1 = deps->at(i+1);
290 ciBaseObject* y2 = deps->at(i+2);
291 ciBaseObject* y3 = deps->at(i+3);
292 if (x1 == y1 && x2 == y2 && x3 == y3) { // same subjects; check the context
293 if (maybe_merge_ctxk(deps, i+0, ctxk)) {
294 return;
295 }
296 }
297 }
298 }
299 // append the assertion in the correct bucket:
300 deps->append(ctxk);
301 deps->append(x1);
302 deps->append(x2);
303 deps->append(x3);
304 }
305
306 #if INCLUDE_JVMCI
307 bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps,
308 int ctxk_i, DepValue ctxk2_dv) {
309 Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder);
310 Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder);
311 if (ctxk2->is_subtype_of(ctxk1)) {
312 return true; // success, and no need to change
313 } else if (ctxk1->is_subtype_of(ctxk2)) {
314 // new context class fully subsumes previous one
315 deps->at_put(ctxk_i, ctxk2_dv);
316 return true;
317 } else {
318 return false;
319 }
320 }
321
322 void Dependencies::assert_common_1(DepType dept, DepValue x) {
323 assert(dep_args(dept) == 1, "sanity");
324 //log_dependency(dept, x);
325 GrowableArray<DepValue>* deps = _dep_values[dept];
326
327 // see if the same (or a similar) dep is already recorded
328 if (note_dep_seen(dept, x)) {
329 assert(deps->find(x) >= 0, "sanity");
330 } else {
331 deps->append(x);
332 }
333 }
334
335 void Dependencies::assert_common_2(DepType dept,
336 DepValue x0, DepValue x1) {
337 assert(dep_args(dept) == 2, "sanity");
338 //log_dependency(dept, x0, x1);
339 GrowableArray<DepValue>* deps = _dep_values[dept];
340
341 // see if the same (or a similar) dep is already recorded
342 bool has_ctxk = has_explicit_context_arg(dept);
343 if (has_ctxk) {
344 assert(dep_context_arg(dept) == 0, "sanity");
345 if (note_dep_seen(dept, x1)) {
346 // look in this bucket for redundant assertions
347 const int stride = 2;
348 for (int i = deps->length(); (i -= stride) >= 0; ) {
349 DepValue y1 = deps->at(i+1);
350 if (x1 == y1) { // same subject; check the context
351 if (maybe_merge_ctxk(deps, i+0, x0)) {
352 return;
353 }
354 }
355 }
356 }
357 } else {
358 bool dep_seen_x0 = note_dep_seen(dept, x0); // records x0 for future queries
359 bool dep_seen_x1 = note_dep_seen(dept, x1); // records x1 for future queries
360 if (dep_seen_x0 && dep_seen_x1) {
361 // look in this bucket for redundant assertions
362 const int stride = 2;
363 for (int i = deps->length(); (i -= stride) >= 0; ) {
364 DepValue y0 = deps->at(i+0);
365 DepValue y1 = deps->at(i+1);
366 if (x0 == y0 && x1 == y1) {
367 return;
368 }
369 }
370 }
371 }
372
373 // append the assertion in the correct bucket:
374 deps->append(x0);
375 deps->append(x1);
376 }
377 #endif // INCLUDE_JVMCI
378
379 /// Support for encoding dependencies into an nmethod:
380
381 void Dependencies::copy_to(nmethod* nm) {
382 address beg = nm->dependencies_begin();
383 address end = nm->dependencies_end();
384 guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
385 (void)memcpy(beg, content_bytes(), size_in_bytes());
386 assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
387 }
388
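// Comparators used by sort_all_deps(): dependency tuples are ordered by the
// idents of their arguments, compared position by position, giving the
// encoder a canonical ordering of the recorded assertions.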
389 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
390 for (int i = 0; i < narg; i++) {
391 int diff = p1[i]->ident() - p2[i]->ident();
392 if (diff != 0) return diff;
393 }
394 return 0;
395 }
396 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
397 { return sort_dep(p1, p2, 1); }
398 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
399 { return sort_dep(p1, p2, 2); }
400 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
401 { return sort_dep(p1, p2, 3); }
402 static int sort_dep_arg_4(ciBaseObject** p1, ciBaseObject** p2)
403 { return sort_dep(p1, p2, 4); }
404
405 #if INCLUDE_JVMCI
406 // metadata deps are sorted before object deps
407 static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) {
408 for (int i = 0; i < narg; i++) {
409 int diff = p1[i].sort_key() - p2[i].sort_key();
410 if (diff != 0) return diff;
411 }
412 return 0;
413 }
414 static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
415 { return sort_dep_value(p1, p2, 1); }
416 static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
417 { return sort_dep_value(p1, p2, 2); }
418 static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
419 { return sort_dep_value(p1, p2, 3); }
420 #endif // INCLUDE_JVMCI
421
422 void Dependencies::sort_all_deps() {
423 #if INCLUDE_JVMCI
424 if (_using_dep_values) {
425 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
426 DepType dept = (DepType)deptv;
427 GrowableArray<DepValue>* deps = _dep_values[dept];
428 if (deps->length() <= 1) continue;
429 switch (dep_args(dept)) {
430 case 1: deps->sort(sort_dep_value_arg_1, 1); break;
431 case 2: deps->sort(sort_dep_value_arg_2, 2); break;
432 case 3: deps->sort(sort_dep_value_arg_3, 3); break;
433 default: ShouldNotReachHere(); break;
434 }
435 }
436 return;
437 }
438 #endif // INCLUDE_JVMCI
439 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
440 DepType dept = (DepType)deptv;
441 GrowableArray<ciBaseObject*>* deps = _deps[dept];
442 if (deps->length() <= 1) continue;
443 switch (dep_args(dept)) {
444 case 1: deps->sort(sort_dep_arg_1, 1); break;
445 case 2: deps->sort(sort_dep_arg_2, 2); break;
446 case 3: deps->sort(sort_dep_arg_3, 3); break;
447 case 4: deps->sort(sort_dep_arg_4, 4); break;
448 default: ShouldNotReachHere(); break;
449 }
450 }
451 }
452
453 size_t Dependencies::estimate_size_in_bytes() {
454 size_t est_size = 100;
455 #if INCLUDE_JVMCI
456 if (_using_dep_values) {
457 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
458 DepType dept = (DepType)deptv;
459 GrowableArray<DepValue>* deps = _dep_values[dept];
460 est_size += deps->length() * 2; // tags and argument(s)
461 }
462 return est_size;
463 }
464 #endif // INCLUDE_JVMCI
465 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
466 DepType dept = (DepType)deptv;
467 GrowableArray<ciBaseObject*>* deps = _deps[dept];
468 est_size += deps->length()*2; // tags and argument(s)
469 }
470 return est_size;
471 }
472
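// For some dependency types the context klass is redundant: it can be
// recomputed from the argument that follows it (e.g. the holder of a method).
// These helpers return that implied context, which lets the encoder drop the
// explicit context argument and set default_context_type_bit instead.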
473 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
474 switch (dept) {
475 case unique_concrete_method_2:
476 case unique_concrete_method_4:
477 return x->as_metadata()->as_method()->holder();
478 default:
479 return nullptr; // let nullptr be nullptr
480 }
481 }
482
483 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
484 assert(must_be_in_vm(), "raw oops here");
485 switch (dept) {
486 case unique_concrete_method_2:
487 case unique_concrete_method_4:
488 assert(x->is_method(), "sanity");
489 return ((Method*)x)->method_holder();
490 default:
491 return nullptr; // let nullptr be nullptr
492 }
493 }
494
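// Flatten the recorded assertions into the compressed byte stream that is
// later copied into the nmethod. Layout (decoded by DepStream::next()):
// for each dependency, one tag byte (the DepType, with default_context_type_bit
// set when the context argument is omitted because it is implied by the next
// argument), followed by a compressed int index per remaining argument. The
// stream ends with an end_marker byte and is padded with further end_marker
// bytes up to a HeapWord boundary.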
495 void Dependencies::encode_content_bytes() {
496 sort_all_deps();
497
498 // cast is safe, no deps can overflow INT_MAX
499 CompressedWriteStream bytes((int)estimate_size_in_bytes());
500
501 #if INCLUDE_JVMCI
502 if (_using_dep_values) {
503 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
504 DepType dept = (DepType)deptv;
505 GrowableArray<DepValue>* deps = _dep_values[dept];
506 if (deps->length() == 0) continue;
507 int stride = dep_args(dept);
508 int ctxkj = dep_context_arg(dept); // -1 if no context arg
509 assert(stride > 0, "sanity");
510 for (int i = 0; i < deps->length(); i += stride) {
511 jbyte code_byte = (jbyte)dept;
512 int skipj = -1;
513 if (ctxkj >= 0 && ctxkj+1 < stride) {
514 Klass* ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder);
515 DepValue x = deps->at(i+ctxkj+1); // following argument
516 if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) {
517 skipj = ctxkj; // we win: maybe one less oop to keep track of
518 code_byte |= default_context_type_bit;
519 }
520 }
521 bytes.write_byte(code_byte);
522 for (int j = 0; j < stride; j++) {
523 if (j == skipj) continue;
524 DepValue v = deps->at(i+j);
525 int idx = v.index();
526 bytes.write_int(idx);
527 }
528 }
529 }
530 } else {
531 #endif // INCLUDE_JVMCI
532 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
533 DepType dept = (DepType)deptv;
534 GrowableArray<ciBaseObject*>* deps = _deps[dept];
535 if (deps->length() == 0) continue;
536 int stride = dep_args(dept);
537 int ctxkj = dep_context_arg(dept); // -1 if no context arg
538 assert(stride > 0, "sanity");
539 for (int i = 0; i < deps->length(); i += stride) {
540 jbyte code_byte = (jbyte)dept;
541 int skipj = -1;
542 if (ctxkj >= 0 && ctxkj+1 < stride) {
543 ciKlass* ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
544 ciBaseObject* x = deps->at(i+ctxkj+1); // following argument
545 if (ctxk == ctxk_encoded_as_null(dept, x)) {
546 skipj = ctxkj; // we win: maybe one less oop to keep track of
547 code_byte |= default_context_type_bit;
548 }
549 }
550 bytes.write_byte(code_byte);
551 for (int j = 0; j < stride; j++) {
552 if (j == skipj) continue;
553 ciBaseObject* v = deps->at(i+j);
554 int idx;
555 if (v->is_object()) {
556 idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
557 } else {
558 ciMetadata* meta = v->as_metadata();
559 idx = _oop_recorder->find_index(meta->constant_encoding());
560 }
561 bytes.write_int(idx);
562 }
563 }
564 }
565 #if INCLUDE_JVMCI
566 }
567 #endif
568
569 // write a sentinel byte to mark the end
570 bytes.write_byte(end_marker);
571
572 // round it out to a word boundary
573 while (bytes.position() % sizeof(HeapWord) != 0) {
574 bytes.write_byte(end_marker);
575 }
576
577 // check whether the dept byte encoding really works
578 assert((jbyte)default_context_type_bit != 0, "byte overflow");
579
580 _content_bytes = bytes.buffer();
581 _size_in_bytes = bytes.position();
582 }
583
584
585 const char* Dependencies::_dep_name[TYPE_LIMIT] = {
586 "end_marker",
587 "evol_method",
588 "leaf_type",
589 "abstract_with_unique_concrete_subtype",
590 "unique_concrete_method_2",
591 "unique_concrete_method_4",
592 "unique_implementor",
593 "no_finalizable_subclasses",
594 "call_site_target_value"
595 };
596
597 int Dependencies::_dep_args[TYPE_LIMIT] = {
598 -1,// end_marker
599 1, // evol_method m
600 1, // leaf_type ctxk
601 2, // abstract_with_unique_concrete_subtype ctxk, k
602 2, // unique_concrete_method_2 ctxk, m
603 4, // unique_concrete_method_4 ctxk, m, resolved_klass, resolved_method
604 2, // unique_implementor ctxk, implementor
605 1, // no_finalizable_subclasses ctxk
606 2 // call_site_target_value call_site, method_handle
607 };
608
609 const char* Dependencies::dep_name(Dependencies::DepType dept) {
610 if (!dept_in_mask(dept, all_types)) return "?bad-dep?";
611 return _dep_name[dept];
612 }
613
614 int Dependencies::dep_args(Dependencies::DepType dept) {
615 if (!dept_in_mask(dept, all_types)) return -1;
616 return _dep_args[dept];
617 }
618
619 void Dependencies::check_valid_dependency_type(DepType dept) {
620 guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d", (int) dept);
621 }
622
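// Re-check every recorded dependency against the current class hierarchy.
// Returns the type of the first violated dependency (end_marker if none) and,
// if requested, a printable description of that first violation. When xtty
// logging is active, all violations are visited so each one can be logged.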
623 Dependencies::DepType Dependencies::validate_dependencies(CompileTask* task, char** failure_detail) {
624 int klass_violations = 0;
625 DepType result = end_marker;
626 for (Dependencies::DepStream deps(this); deps.next(); ) {
627 Klass* witness = deps.check_dependency();
628 if (witness != nullptr) {
629 if (klass_violations == 0) {
630 result = deps.type();
631 if (failure_detail != nullptr) {
632 // Use a fixed size buffer to prevent the string stream from
633 // resizing in the context of an inner resource mark.
634 char* buffer = NEW_RESOURCE_ARRAY(char, O_BUFLEN);
635 stringStream st(buffer, O_BUFLEN);
636 deps.print_dependency(&st, witness, true);
637 *failure_detail = st.as_string();
638 }
639 }
640 klass_violations++;
641 if (xtty == nullptr) {
642 // If we're not logging then a single violation is sufficient,
643 // otherwise we want to log all the dependences which were
644 // violated.
645 break;
646 }
647 }
648 }
649
650 return result;
651 }
652
653 // for the sake of the compiler log, print out current dependencies:
654 void Dependencies::log_all_dependencies() {
655 if (log() == nullptr) return;
656 ResourceMark rm;
657 for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
658 DepType dept = (DepType)deptv;
659 GrowableArray<ciBaseObject*>* deps = _deps[dept];
660 int deplen = deps->length();
661 if (deplen == 0) {
662 continue;
663 }
664 int stride = dep_args(dept);
665 GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
666 for (int i = 0; i < deps->length(); i += stride) {
667 for (int j = 0; j < stride; j++) {
668 // flush out the identities before printing
669 ciargs->push(deps->at(i+j));
670 }
671 write_dependency_to(log(), dept, ciargs);
672 ciargs->clear();
673 }
674 guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
675 }
676 }
677
678 void Dependencies::write_dependency_to(CompileLog* log,
679 DepType dept,
680 GrowableArray<DepArgument>* args,
681 Klass* witness) {
682 if (log == nullptr) {
683 return;
684 }
685 ResourceMark rm;
686 ciEnv* env = ciEnv::current();
687 GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
688 for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
689 DepArgument arg = *it;
690 if (arg.is_oop()) {
691 ciargs->push(env->get_object(arg.oop_value()));
692 } else {
693 ciargs->push(env->get_metadata(arg.metadata_value()));
694 }
695 }
696 int argslen = ciargs->length();
697 Dependencies::write_dependency_to(log, dept, ciargs, witness);
698 guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
699 }
700
701 void Dependencies::write_dependency_to(CompileLog* log,
702 DepType dept,
703 GrowableArray<ciBaseObject*>* args,
704 Klass* witness) {
705 if (log == nullptr) {
706 return;
707 }
708 ResourceMark rm;
709 GrowableArray<int>* argids = new GrowableArray<int>(args->length());
710 for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
711 ciBaseObject* obj = *it;
712 if (obj->is_object()) {
713 argids->push(log->identify(obj->as_object()));
714 } else {
715 argids->push(log->identify(obj->as_metadata()));
716 }
717 }
718 if (witness != nullptr) {
719 log->begin_elem("dependency_failed");
720 } else {
721 log->begin_elem("dependency");
722 }
723 log->print(" type='%s'", dep_name(dept));
724 const int ctxkj = dep_context_arg(dept); // -1 if no context arg
725 if (ctxkj >= 0 && ctxkj < argids->length()) {
726 log->print(" ctxk='%d'", argids->at(ctxkj));
727 }
728 // write remaining arguments, if any.
729 for (int j = 0; j < argids->length(); j++) {
730 if (j == ctxkj) continue; // already logged
731 if (j == 1) {
732 log->print( " x='%d'", argids->at(j));
733 } else {
734 log->print(" x%d='%d'", j, argids->at(j));
735 }
736 }
737 if (witness != nullptr) {
738 log->object("witness", witness);
739 log->stamp();
740 }
741 log->end_elem();
742 }
743
744 void Dependencies::write_dependency_to(xmlStream* xtty,
745 DepType dept,
746 GrowableArray<DepArgument>* args,
747 Klass* witness) {
748 if (xtty == nullptr) {
749 return;
750 }
751 Thread* thread = Thread::current();
752 HandleMark rm(thread);
753 ttyLocker ttyl;
754 int ctxkj = dep_context_arg(dept); // -1 if no context arg
755 if (witness != nullptr) {
756 xtty->begin_elem("dependency_failed");
757 } else {
758 xtty->begin_elem("dependency");
759 }
760 xtty->print(" type='%s'", dep_name(dept));
761 if (ctxkj >= 0) {
762 xtty->object("ctxk", args->at(ctxkj).metadata_value());
763 }
764 // write remaining arguments, if any.
765 for (int j = 0; j < args->length(); j++) {
766 if (j == ctxkj) continue; // already logged
767 DepArgument arg = args->at(j);
768 if (j == 1) {
769 if (arg.is_oop()) {
770 xtty->object("x", Handle(thread, arg.oop_value()));
771 } else {
772 xtty->object("x", arg.metadata_value());
773 }
774 } else {
775 char xn[12];
776 os::snprintf_checked(xn, sizeof(xn), "x%d", j);
777 if (arg.is_oop()) {
778 xtty->object(xn, Handle(thread, arg.oop_value()));
779 } else {
780 xtty->object(xn, arg.metadata_value());
781 }
782 }
783 }
784 if (witness != nullptr) {
785 xtty->object("witness", witness);
786 xtty->stamp();
787 }
788 xtty->end_elem();
789 }
790
791 void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
792 Klass* witness, outputStream* st) {
793 ResourceMark rm;
794 ttyLocker ttyl; // keep the following output all in one block
795 st->print_cr("%s of type %s",
796 (witness == nullptr)? "Dependency": "Failed dependency",
797 dep_name(dept));
798 // print arguments
799 int ctxkj = dep_context_arg(dept); // -1 if no context arg
800 for (int j = 0; j < args->length(); j++) {
801 DepArgument arg = args->at(j);
802 bool put_star = false;
803 if (arg.is_null()) continue;
804 const char* what;
805 if (j == ctxkj) {
806 assert(arg.is_metadata(), "must be");
807 what = "context";
808 put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
809 } else if (arg.is_method()) {
810 what = "method ";
811 put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), nullptr);
812 } else if (arg.is_klass()) {
813 what = "class ";
814 } else {
815 what = "object ";
816 }
817 st->print(" %s = %s", what, (put_star? "*": ""));
818 if (arg.is_klass()) {
819 st->print("%s", ((Klass*)arg.metadata_value())->external_name());
820 } else if (arg.is_method()) {
821 ((Method*)arg.metadata_value())->print_value_on(st);
822 } else if (arg.is_oop()) {
823 arg.oop_value()->print_value_on(st);
824 } else {
825 ShouldNotReachHere(); // Provide impl for this type.
826 }
827
828 st->cr();
829 }
830 if (witness != nullptr) {
831 bool put_star = !Dependencies::is_concrete_klass(witness);
832 st->print_cr(" witness = %s%s",
833 (put_star? "*": ""),
834 witness->external_name());
835 }
836 }
837
838 void Dependencies::DepStream::log_dependency(Klass* witness) {
839 if (_deps == nullptr && xtty == nullptr) return; // fast cutout for runtime
840 ResourceMark rm;
841 const int nargs = argument_count();
842 GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
843 for (int j = 0; j < nargs; j++) {
844 if (is_oop_argument(j)) {
845 args->push(argument_oop(j));
846 } else {
847 args->push(argument(j));
848 }
849 }
850 int argslen = args->length();
851 if (_deps != nullptr && _deps->log() != nullptr) {
852 if (ciEnv::current() != nullptr) {
853 Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
854 } else {
855 // Treat the CompileLog as an xmlstream instead
856 Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness);
857 }
858 } else {
859 Dependencies::write_dependency_to(xtty, type(), args, witness);
860 }
861 guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
862 }
863
864 void Dependencies::DepStream::print_dependency(outputStream* st, Klass* witness, bool verbose) {
865 ResourceMark rm;
866 int nargs = argument_count();
867 GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
868 for (int j = 0; j < nargs; j++) {
869 if (is_oop_argument(j)) {
870 args->push(argument_oop(j));
871 } else {
872 args->push(argument(j));
873 }
874 }
875 int argslen = args->length();
876 Dependencies::print_dependency(type(), args, witness, st);
877 if (verbose) {
878 if (_code != nullptr) {
879 st->print(" code: ");
880 _code->print_value_on(st);
881 st->cr();
882 }
883 }
884 guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
885 }
886
887
888 /// Dependency stream support (decodes dependencies from an nmethod):
889
890 #ifdef ASSERT
891 void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
892 assert(must_be_in_vm(), "raw oops here");
893 _byte_limit = byte_limit;
894 _type = undefined_dependency; // defeat "already at end" assert
895 assert((_code!=nullptr) + (_deps!=nullptr) == 1, "one or t'other");
896 }
897 #endif //ASSERT
898
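// Advance to the next dependency in the stream. This decodes the tag byte
// written by encode_content_bytes(): it strips default_context_type_bit,
// validates the DepType, and reads the argument indices (storing 0 for a
// context argument that was omitted during encoding). Returns false once the
// end_marker is reached.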
899 bool Dependencies::DepStream::next() {
900 assert(_type != end_marker, "already at end");
901 if (_bytes.position() == 0 && _code != nullptr
902 && _code->dependencies_size() == 0) {
903 // Method has no dependencies at all.
904 return false;
905 }
906 int code_byte = (_bytes.read_byte() & 0xFF);
907 if (code_byte == end_marker) {
908 DEBUG_ONLY(_type = end_marker);
909 return false;
910 } else {
911 int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
912 code_byte -= ctxk_bit;
913 DepType dept = (DepType)code_byte;
914 _type = dept;
915 Dependencies::check_valid_dependency_type(dept);
916 int stride = _dep_args[dept];
917 assert(stride == dep_args(dept), "sanity");
918 int skipj = -1;
919 if (ctxk_bit != 0) {
920 skipj = 0; // currently the only context argument is at zero
921 assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
922 }
923 for (int j = 0; j < stride; j++) {
924 _xi[j] = (j == skipj)? 0: _bytes.read_int();
925 }
926 DEBUG_ONLY(_xi[stride] = -1); // help detect overruns
927 return true;
928 }
929 }
930
931 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
932 Metadata* o = nullptr;
933 if (_code != nullptr) {
934 o = _code->metadata_at(i);
935 } else {
936 o = _deps->oop_recorder()->metadata_at(i);
937 }
938 return o;
939 }
940
941 inline oop Dependencies::DepStream::recorded_oop_at(int i) {
942 return (_code != nullptr)
943 ? _code->oop_at(i)
944 : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
945 }
946
947 Metadata* Dependencies::DepStream::argument(int i) {
948 Metadata* result = recorded_metadata_at(argument_index(i));
949
950 if (result == nullptr) { // Explicit context argument can be compressed
951 int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
952 if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
953 result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
954 }
955 }
956
957 assert(result == nullptr || result->is_klass() || result->is_method(), "must be");
958 return result;
959 }
960
961 /**
962 * Returns a unique identifier for each dependency argument.
963 */
964 uintptr_t Dependencies::DepStream::get_identifier(int i) {
965 if (is_oop_argument(i)) {
966 return (uintptr_t)(oopDesc*)argument_oop(i);
967 } else {
968 return (uintptr_t)argument(i);
969 }
970 }
971
972 oop Dependencies::DepStream::argument_oop(int i) {
973 oop result = recorded_oop_at(argument_index(i));
974 assert(oopDesc::is_oop_or_null(result), "must be");
975 return result;
976 }
977
978 InstanceKlass* Dependencies::DepStream::context_type() {
979 assert(must_be_in_vm(), "raw oops here");
980
981 // Most dependencies have an explicit context type argument.
982 {
983 int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
984 if (ctxkj >= 0) {
985 Metadata* k = argument(ctxkj);
986 assert(k != nullptr && k->is_klass(), "type check");
987 return InstanceKlass::cast((Klass*)k);
988 }
989 }
990
991 // Some dependencies use the klass of the first object
992 // argument as the implicit context type.
993 {
994 int ctxkj = dep_implicit_context_arg(type());
995 if (ctxkj >= 0) {
996 Klass* k = argument_oop(ctxkj)->klass();
997 assert(k != nullptr, "type check");
998 return InstanceKlass::cast(k);
999 }
1000 }
1001
1002 // And some dependencies don't have a context type at all,
1003 // e.g. evol_method.
1004 return nullptr;
1005 }
1006
1007 // ----------------- DependencySignature --------------------------------------
1008 bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
1009 if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
1010 return false;
1011 }
1012
1013 for (int i = 0; i < s1.args_count(); i++) {
1014 if (s1.arg(i) != s2.arg(i)) {
1015 return false;
1016 }
1017 }
1018 return true;
1019 }
1020
1021 /// Checking dependencies
1022
1023 // This hierarchy walker inspects subtypes of a given type, trying to find a "bad" class which breaks a dependency.
1024 // Such a class is called a "witness" to the broken dependency.
1025 // While searching around, we ignore "participants", which are already known to the dependency.
1026 class AbstractClassHierarchyWalker {
1027 public:
1028 enum { PARTICIPANT_LIMIT = 3 };
1029
1030 private:
1031 // if non-zero, tells how many witnesses to convert to participants
1032 uint _record_witnesses;
1033
1034 // special classes which are not allowed to be witnesses:
1035 Klass* _participants[PARTICIPANT_LIMIT+1];
1036 uint _num_participants;
1037
1038 #ifdef ASSERT
1039 uint _nof_requests; // one-shot walker
1040 #endif // ASSERT
1041
1042 static PerfCounter* _perf_find_witness_anywhere_calls_count;
1043 static PerfCounter* _perf_find_witness_anywhere_steps_count;
1044 static PerfCounter* _perf_find_witness_in_calls_count;
1045
1046 protected:
1047 virtual Klass* find_witness_in(KlassDepChange& changes) = 0;
1048 virtual Klass* find_witness_anywhere(InstanceKlass* context_type) = 0;
1049
1050 AbstractClassHierarchyWalker(Klass* participant) : _record_witnesses(0), _num_participants(0)
1051 #ifdef ASSERT
1052 , _nof_requests(0)
1053 #endif // ASSERT
1054 {
1055 for (uint i = 0; i < PARTICIPANT_LIMIT+1; i++) {
1056 _participants[i] = nullptr;
1057 }
1058 if (participant != nullptr) {
1059 add_participant(participant);
1060 }
1061 }
1062
1063 bool is_participant(Klass* k) {
1064 for (uint i = 0; i < _num_participants; i++) {
1065 if (_participants[i] == k) {
1066 return true;
1067 }
1068 }
1069 return false;
1070 }
1071
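// Called when a candidate witness has been found. If the walker was asked
// (via record_witnesses()) to absorb a few more participants, the candidate
// is converted into a participant and the search continues; otherwise it is
// reported as a genuine witness.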
1072 bool record_witness(Klass* witness) {
1073 if (_record_witnesses > 0) {
1074 --_record_witnesses;
1075 add_participant(witness);
1076 return false; // not a witness
1077 } else {
1078 return true; // is a witness
1079 }
1080 }
1081
1082 class CountingClassHierarchyIterator : public ClassHierarchyIterator {
1083 private:
1084 jlong _nof_steps;
1085 public:
1086 CountingClassHierarchyIterator(InstanceKlass* root) : ClassHierarchyIterator(root), _nof_steps(0) {}
1087
1088 void next() {
1089 _nof_steps++;
1090 ClassHierarchyIterator::next();
1091 }
1092
1093 ~CountingClassHierarchyIterator() {
1094 if (UsePerfData) {
1095 _perf_find_witness_anywhere_steps_count->inc(_nof_steps);
1096 }
1097 }
1098 };
1099
1100 public:
1101 uint num_participants() { return _num_participants; }
1102 Klass* participant(uint n) {
1103 assert(n <= _num_participants, "oob");
1104 if (n < _num_participants) {
1105 return _participants[n];
1106 } else {
1107 return nullptr;
1108 }
1109 }
1110
1111 void add_participant(Klass* participant) {
1112 assert(!is_participant(participant), "sanity");
1113 assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
1114 uint np = _num_participants++;
1115 _participants[np] = participant;
1116 }
1117
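// Ask the walker to convert up to 'add' encountered witnesses into additional
// participants instead of reporting them; typically used by callers that want
// to enumerate a small, bounded number of concrete types or methods.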
1118 void record_witnesses(uint add) {
1119 if (add > PARTICIPANT_LIMIT) add = PARTICIPANT_LIMIT;
1120 assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
1121 _record_witnesses = add;
1122 }
1123
1124 Klass* find_witness(InstanceKlass* context_type, KlassDepChange* changes = nullptr);
1125
1126 static void init();
1127 static void print_statistics();
1128 };
1129
1130 PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_anywhere_calls_count = nullptr;
1131 PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_anywhere_steps_count = nullptr;
1132 PerfCounter* AbstractClassHierarchyWalker::_perf_find_witness_in_calls_count = nullptr;
1133
1134 void AbstractClassHierarchyWalker::init() {
1135 if (UsePerfData) {
1136 EXCEPTION_MARK;
1137 _perf_find_witness_anywhere_calls_count =
1138 PerfDataManager::create_counter(SUN_CI, "findWitnessAnywhere", PerfData::U_Events, CHECK);
1139 _perf_find_witness_anywhere_steps_count =
1140 PerfDataManager::create_counter(SUN_CI, "findWitnessAnywhereSteps", PerfData::U_Events, CHECK);
1141 _perf_find_witness_in_calls_count =
1142 PerfDataManager::create_counter(SUN_CI, "findWitnessIn", PerfData::U_Events, CHECK);
1143 }
1144 }
1145
1146 Klass* AbstractClassHierarchyWalker::find_witness(InstanceKlass* context_type, KlassDepChange* changes) {
1147 // Current thread must be in VM (not native mode, as in CI):
1148 assert(must_be_in_vm(), "raw oops here");
1149 // Must not move the class hierarchy during this check:
1150 assert_locked_or_safepoint(Compile_lock);
1151 assert(_nof_requests++ == 0, "repeated requests are not supported");
1152
1153 assert(changes == nullptr || changes->involves_context(context_type), "irrelevant dependency");
1154
1155 // (Note: Interfaces do not have subclasses.)
1156 // If it is an interface, search its direct implementors.
1157 // (Their subclasses are additional indirect implementors. See InstanceKlass::add_implementor().)
1158 if (context_type->is_interface()) {
1159 int nof_impls = context_type->nof_implementors();
1160 if (nof_impls == 0) {
1161 return nullptr; // no implementors
1162 } else if (nof_impls == 1) { // unique implementor
1163 assert(context_type != context_type->implementor(), "not unique");
1164 context_type = context_type->implementor();
1165 } else { // nof_impls >= 2
1166 // Avoid this case: *I.m > { A.m, C }; B.m > C
1167 // Here, I.m has 2 concrete implementations, but m appears unique
1168 // as A.m, because the search misses B.m when checking C.
1169 // The inherited method B.m was getting missed by the walker
1170 // when interface 'I' was the starting point.
1171 // %%% Until this is fixed more systematically, bail out.
1172 return context_type;
1173 }
1174 }
1175 assert(!context_type->is_interface(), "no interfaces allowed");
1176
1177 if (changes != nullptr) {
1178 if (UsePerfData) {
1179 _perf_find_witness_in_calls_count->inc();
1180 }
1181 return find_witness_in(*changes);
1182 } else {
1183 if (UsePerfData) {
1184 _perf_find_witness_anywhere_calls_count->inc();
1185 }
1186 return find_witness_anywhere(context_type);
1187 }
1188 }
1189
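// Walker that looks for a concrete class below the context type, other than
// the recorded participants; a hit witnesses assertions of the form "this
// type has no (other) concrete subtypes".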
1190 class ConcreteSubtypeFinder : public AbstractClassHierarchyWalker {
1191 private:
1192 bool is_witness(Klass* k);
1193
1194 protected:
1195 virtual Klass* find_witness_in(KlassDepChange& changes);
1196 virtual Klass* find_witness_anywhere(InstanceKlass* context_type);
1197
1198 public:
1199 ConcreteSubtypeFinder(Klass* participant = nullptr) : AbstractClassHierarchyWalker(participant) {}
1200 };
1201
1202 bool ConcreteSubtypeFinder::is_witness(Klass* k) {
1203 if (Dependencies::is_concrete_klass(k)) {
1204 return record_witness(k); // concrete subtype
1205 } else {
1206 return false; // not a concrete class
1207 }
1208 }
1209
1210 Klass* ConcreteSubtypeFinder::find_witness_in(KlassDepChange& changes) {
1211 // When looking for unexpected concrete types, do not look beneath expected ones:
1212 // * CX > CC > C' is OK, even if C' is new.
1213 // * CX > { CC, C' } is not OK if C' is new, and C' is the witness.
1214 Klass* new_type = changes.as_new_klass_change()->new_type();
1215 assert(!is_participant(new_type), "only old classes are participants");
1216 // If the new type is a subtype of a participant, we are done.
1217 for (uint i = 0; i < num_participants(); i++) {
1218 if (changes.involves_context(participant(i))) {
1219 // new guy is protected from this check by previous participant
1220 return nullptr;
1221 }
1222 }
1223 if (is_witness(new_type)) {
1224 return new_type;
1225 }
1226 // No witness found. The dependency remains unbroken.
1227 return nullptr;
1228 }
1229
1230 Klass* ConcreteSubtypeFinder::find_witness_anywhere(InstanceKlass* context_type) {
1231 for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
1232 Klass* sub = iter.klass();
1233 // Do not report participant types.
1234 if (is_participant(sub)) {
1235 // Don't walk beneath a participant since it hides witnesses.
1236 iter.skip_subclasses();
1237 } else if (is_witness(sub)) {
1238 return sub; // found a witness
1239 }
1240 }
1241 // No witness found. The dependency remains unbroken.
1242 return nullptr;
1243 }
1244
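// Walker that looks for a class below the context type supplying a concrete
// implementation of a given method (matched by name and signature), other
// than the recorded participants; a hit witnesses "unique concrete method"
// style assertions. It also detects re-abstraction of the method.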
1245 class ConcreteMethodFinder : public AbstractClassHierarchyWalker {
1246 private:
1247 Symbol* _name;
1248 Symbol* _signature;
1249
1250 // cache of method lookups
1251 Method* _found_methods[PARTICIPANT_LIMIT+1];
1252
1253 bool is_witness(Klass* k);
1254
1255 protected:
1256 virtual Klass* find_witness_in(KlassDepChange& changes);
1257 virtual Klass* find_witness_anywhere(InstanceKlass* context_type);
1258
1259 public:
1260 bool witnessed_reabstraction_in_supers(Klass* k);
1261
1262 ConcreteMethodFinder(Method* m, Klass* participant = nullptr) : AbstractClassHierarchyWalker(participant) {
1263 assert(m != nullptr && m->is_method(), "sanity");
1264 _name = m->name();
1265 _signature = m->signature();
1266
1267 for (int i = 0; i < PARTICIPANT_LIMIT+1; i++) {
1268 _found_methods[i] = nullptr;
1269 }
1270 }
1271
1272 // Note: If n==num_participants, returns nullptr.
1273 Method* found_method(uint n) {
1274 assert(n <= num_participants(), "oob");
1275 Method* fm = _found_methods[n];
1276 assert(n == num_participants() || fm != nullptr, "proper usage");
1277 if (fm != nullptr && fm->method_holder() != participant(n)) {
1278 // Default methods from interfaces can be added to classes. In
1279 // that case the holder of the method is not the class but the
1280 // interface where it's defined.
1281 assert(fm->is_default_method(), "sanity");
1282 return nullptr;
1283 }
1284 return fm;
1285 }
1286
1287 void add_participant(Klass* participant) {
1288 AbstractClassHierarchyWalker::add_participant(participant);
1289 _found_methods[num_participants()] = nullptr;
1290 }
1291
1292 bool record_witness(Klass* witness, Method* m) {
1293 _found_methods[num_participants()] = m;
1294 return AbstractClassHierarchyWalker::record_witness(witness);
1295 }
1296
1297 private:
1298 static PerfCounter* _perf_find_witness_anywhere_calls_count;
1299 static PerfCounter* _perf_find_witness_anywhere_steps_count;
1300 static PerfCounter* _perf_find_witness_in_calls_count;
1301
1302 public:
1303 static void init();
1304 static void print_statistics();
1305 };
1306
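// A class k is a witness if it (or one of its default methods) supplies a
// concrete implementation of the sought method, or if it re-abstracts the
// method in a way that leaves some concrete subtype without an overriding
// implementation (a potential AbstractMethodError receiver).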
1307 bool ConcreteMethodFinder::is_witness(Klass* k) {
1308 if (is_participant(k)) {
1309 return false; // do not report participant types
1310 }
1311 if (k->is_instance_klass()) {
1312 InstanceKlass* ik = InstanceKlass::cast(k);
1313 // Search class hierarchy first, skipping private implementations
1314 // as they never override any inherited methods
1315 Method* m = ik->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
1316 if (Dependencies::is_concrete_method(m, ik)) {
1317 return record_witness(k, m); // concrete method found
1318 } else {
1319 // Check for re-abstraction of method
1320 if (!ik->is_interface() && m != nullptr && m->is_abstract()) {
1321 // Found a matching abstract method 'm' in the class hierarchy.
1322 // This is fine iff 'k' is an abstract class and all concrete subtypes
1323 // of 'k' override 'm' and are participants of the current search.
1324 ConcreteSubtypeFinder wf;
1325 for (uint i = 0; i < num_participants(); i++) {
1326 Klass* p = participant(i);
1327 wf.add_participant(p);
1328 }
1329 Klass* w = wf.find_witness(ik);
1330 if (w != nullptr) {
1331 Method* wm = InstanceKlass::cast(w)->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
1332 if (!Dependencies::is_concrete_method(wm, w)) {
1333 // Found a concrete subtype 'w' which does not override abstract method 'm'.
1334 // Bail out because 'm' could be called with 'w' as receiver (leading to an
1335 // AbstractMethodError) and thus the method we are looking for is not unique.
1336 return record_witness(k, m);
1337 }
1338 }
1339 }
1340 // Check interface defaults also, if any exist.
1341 Array<Method*>* default_methods = ik->default_methods();
1342 if (default_methods != nullptr) {
1343 Method* dm = ik->find_method(default_methods, _name, _signature);
1344 if (Dependencies::is_concrete_method(dm, nullptr)) {
1345 return record_witness(k, dm); // default method found
1346 }
1347 }
1348 return false; // no concrete method found
1349 }
1350 } else {
1351 return false; // no methods to find in an array type
1352 }
1353 }
1354
1355 Klass* ConcreteMethodFinder::find_witness_in(KlassDepChange& changes) {
1356 // When looking for unexpected concrete methods, look beneath expected ones, to see if there are overrides.
1357 // * CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
1358 Klass* new_type = changes.as_new_klass_change()->new_type();
1359 assert(!is_participant(new_type), "only old classes are participants");
1360 if (is_witness(new_type)) {
1361 return new_type;
1362 } else {
1363 // No witness found, but is_witness() doesn't detect method re-abstraction in case of spot-checking.
1364 if (witnessed_reabstraction_in_supers(new_type)) {
1365 return new_type;
1366 }
1367 }
1368 // No witness found. The dependency remains unbroken.
1369 return nullptr;
1370 }
1371
1372 bool ConcreteMethodFinder::witnessed_reabstraction_in_supers(Klass* k) {
1373 if (!k->is_instance_klass()) {
1374 return false; // no methods to find in an array type
1375 } else {
1376 // Looking for a case when an abstract method is inherited into a concrete class.
1377 if (Dependencies::is_concrete_klass(k) && !k->is_interface()) {
1378 Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
1379 if (m != nullptr) {
1380 return false; // no reabstraction possible: local method found
1381 }
1382 for (InstanceKlass* super = k->java_super(); super != nullptr; super = super->java_super()) {
1383 m = super->find_instance_method(_name, _signature, Klass::PrivateLookupMode::skip);
1384 if (m != nullptr) { // inherited method found
1385 if (m->is_abstract() || m->is_overpass()) {
1386 return record_witness(super, m); // abstract method found
1387 }
1388 return false;
1389 }
1390 }
1391 // Miranda: no local or inherited implementation exists, so re-abstraction is witnessed.
1392 return true;
1393 }
1394 return false;
1395 }
1396 }
1397
1398
1399 Klass* ConcreteMethodFinder::find_witness_anywhere(InstanceKlass* context_type) {
1400 // Walk hierarchy under a context type, looking for unexpected types.
1401 for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
1402 Klass* sub = iter.klass();
1403 if (is_witness(sub)) {
1404 return sub; // found a witness
1405 }
1406 }
1407 // No witness found. The dependency remains unbroken.
1408 return nullptr;
1409 }
1410
1411 // For some method m and some class ctxk (subclass of method holder),
1412 // enumerate all distinct overrides of m in concrete subclasses of ctxk.
1413 // It relies on vtable/itable information to perform method selection on each linked subclass
1414 // and ignores all not-yet-linked ones (speculatively treating them as "effectively abstract").
1415 class LinkedConcreteMethodFinder : public AbstractClassHierarchyWalker {
1416 private:
1417 InstanceKlass* _resolved_klass; // resolved class (JVMS-5.4.3.1)
1418 InstanceKlass* _declaring_klass; // the holder of resolved method (JVMS-5.4.3.3)
1419 int _vtable_index; // vtable/itable index of the resolved method
1420 bool _do_itable_lookup; // choose between itable and vtable lookup logic
1421
1422 // cache of method lookups
1423 Method* _found_methods[PARTICIPANT_LIMIT+1];
1424
1425 bool is_witness(Klass* k);
1426 Method* select_method(InstanceKlass* recv_klass);
1427 static int compute_vtable_index(InstanceKlass* resolved_klass, Method* resolved_method, bool& is_itable_index);
1428 static bool is_concrete_klass(InstanceKlass* ik);
1429
1430 void add_participant(Method* m, Klass* participant) {
1431 uint np = num_participants();
1432 AbstractClassHierarchyWalker::add_participant(participant);
1433 assert(np + 1 == num_participants(), "sanity");
1434 _found_methods[np] = m; // record the method for the participant
1435 }
1436
1437 bool record_witness(Klass* witness, Method* m) {
1438 for (uint i = 0; i < num_participants(); i++) {
1439 if (found_method(i) == m) {
1440 return false; // already recorded
1441 }
1442 }
1443 // Record not yet seen method.
1444 _found_methods[num_participants()] = m;
1445 return AbstractClassHierarchyWalker::record_witness(witness);
1446 }
1447
1448 void initialize(Method* participant) {
1449 for (uint i = 0; i < PARTICIPANT_LIMIT+1; i++) {
1450 _found_methods[i] = nullptr;
1451 }
1452 if (participant != nullptr) {
1453 add_participant(participant, participant->method_holder());
1454 }
1455 }
1456
1457 protected:
1458 virtual Klass* find_witness_in(KlassDepChange& changes);
1459 virtual Klass* find_witness_anywhere(InstanceKlass* context_type);
1460
1461 public:
1462 // In order to perform method selection, the following info is needed:
1463 // (1) interface or virtual call;
1464 // (2) vtable/itable index;
1465 // (3) declaring class (in case of interface call).
1466 //
1467 // It is prepared based on the results of method resolution: resolved class and resolved method (as specified in JVMS-5.4.3.3).
1468 // Optionally, a method which was previously determined as a unique target (uniqm) is added as a participant
1469 // to enable dependency spot-checking and speed up the search.
1470 LinkedConcreteMethodFinder(InstanceKlass* resolved_klass, Method* resolved_method, Method* uniqm = nullptr) : AbstractClassHierarchyWalker(nullptr) {
1471 assert(resolved_klass->is_linked(), "required");
1472 assert(resolved_method->method_holder()->is_linked(), "required");
1473 assert(!resolved_method->can_be_statically_bound(), "no vtable index available");
1474
1475 _resolved_klass = resolved_klass;
1476 _declaring_klass = resolved_method->method_holder();
1477 _vtable_index = compute_vtable_index(resolved_klass, resolved_method,
1478 _do_itable_lookup); // out parameter
1479 assert(_vtable_index >= 0, "invalid vtable index");
1480
1481 initialize(uniqm);
1482 }
1483
1484 // Note: If n==num_participants, returns nullptr.
1485 Method* found_method(uint n) {
1486 assert(n <= num_participants(), "oob");
1487 assert(participant(n) != nullptr || n == num_participants(), "proper usage");
1488 return _found_methods[n];
1489 }
1490 };
1491
1492 Klass* LinkedConcreteMethodFinder::find_witness_in(KlassDepChange& changes) {
1493 Klass* type = changes.type();
1494
1495 assert(!is_participant(type), "only old classes are participants");
1496
1497 if (is_witness(type)) {
1498 return type;
1499 }
1500 return nullptr; // No witness found. The dependency remains unbroken.
1501 }
1502
1503 Klass* LinkedConcreteMethodFinder::find_witness_anywhere(InstanceKlass* context_type) {
1504 for (CountingClassHierarchyIterator iter(context_type); !iter.done(); iter.next()) {
1505 Klass* sub = iter.klass();
1506 if (is_witness(sub)) {
1507 return sub;
1508 }
1509 if (sub->is_instance_klass() && !InstanceKlass::cast(sub)->is_linked()) {
1510 iter.skip_subclasses(); // ignore not yet linked classes
1511 }
1512 }
1513 return nullptr; // No witness found. The dependency remains unbroken.
1514 }
1515
1516 bool LinkedConcreteMethodFinder::is_witness(Klass* k) {
1517 if (is_participant(k)) {
1518 return false; // do not report participant types
1519 } else if (k->is_instance_klass()) {
1520 InstanceKlass* ik = InstanceKlass::cast(k);
1521 if (is_concrete_klass(ik)) {
1522 Method* m = select_method(ik);
1523 return record_witness(ik, m);
1524 } else {
1525 return false; // ignore non-concrete holder class
1526 }
1527 } else {
1528 return false; // no methods to find in an array type
1529 }
1530 }
1531
1532 Method* LinkedConcreteMethodFinder::select_method(InstanceKlass* recv_klass) {
1533 Method* selected_method = nullptr;
1534 if (_do_itable_lookup) {
1535 assert(_declaring_klass->is_interface(), "sanity");
1536 bool implements_interface; // initialized by method_at_itable_or_null()
1537 selected_method = recv_klass->method_at_itable_or_null(_declaring_klass, _vtable_index,
1538 implements_interface); // out parameter
1539 assert(implements_interface, "not implemented");
1540 } else {
1541 selected_method = recv_klass->method_at_vtable(_vtable_index);
1542 }
1543 return selected_method; // nullptr when corresponding slot is empty (AbstractMethodError case)
1544 }
1545
1546 int LinkedConcreteMethodFinder::compute_vtable_index(InstanceKlass* resolved_klass, Method* resolved_method,
1547 // out parameter
1548 bool& is_itable_index) {
1549 if (resolved_klass->is_interface() && resolved_method->has_itable_index()) {
1550 is_itable_index = true;
1551 return resolved_method->itable_index();
1552 }
1553 // Check for default or miranda method first.
1554 InstanceKlass* declaring_klass = resolved_method->method_holder();
1555 if (!resolved_klass->is_interface() && declaring_klass->is_interface()) {
1556 is_itable_index = false;
1557 return resolved_klass->vtable_index_of_interface_method(resolved_method);
1558 }
1559 // At this point we are sure that resolved_method is virtual and not
1560 // a default or miranda method; therefore, it must have a valid vtable index.
1561 assert(resolved_method->has_vtable_index(), "");
1562 is_itable_index = false;
1563 return resolved_method->vtable_index();
1564 }
1565
1566 bool LinkedConcreteMethodFinder::is_concrete_klass(InstanceKlass* ik) {
1567 if (!Dependencies::is_concrete_klass(ik)) {
1568 return false; // not concrete
1569 }
1570 if (ik->is_interface()) {
1571 return false; // interfaces aren't concrete
1572 }
1573 if (!ik->is_linked()) {
1574 return false; // not yet linked classes don't have instances
1575 }
1576 return true;
1577 }
1578
1579 #ifdef ASSERT
1580 // Assert that m is inherited into ctxk, without intervening overrides.
1581 // (May return true even if this is not true, in corner cases where we punt.)
1582 bool Dependencies::verify_method_context(InstanceKlass* ctxk, Method* m) {
1583 if (m->is_private()) {
1584 return false; // Quick lose. Should not happen.
1585 }
1586 if (m->method_holder() == ctxk) {
1587 return true; // Quick win.
1588 }
1589 if (!(m->is_public() || m->is_protected())) {
1590 // The override story is complex when packages get involved.
1591 return true; // Must punt the assertion to true.
1592 }
1593 Method* lm = ctxk->lookup_method(m->name(), m->signature());
1594 if (lm == nullptr) {
1595 // It might be an interface method
1596 lm = ctxk->lookup_method_in_ordered_interfaces(m->name(), m->signature());
1597 }
1598 if (lm == m) {
1599 // Method m is inherited into ctxk.
1600 return true;
1601 }
1602 if (lm != nullptr) {
1603 if (!(lm->is_public() || lm->is_protected())) {
1604 // Method is [package-]private, so the override story is complex.
1605 return true; // Must punt the assertion to true.
1606 }
1607 if (lm->is_static()) {
1608 // Static methods don't override non-static so punt
1609 return true;
1610 }
1611 if (!Dependencies::is_concrete_method(lm, ctxk) &&
1612 !Dependencies::is_concrete_method(m, ctxk)) {
1613 // They are both non-concrete
1614 if (lm->method_holder()->is_subtype_of(m->method_holder())) {
1615 // Method m is overridden by lm, but both are non-concrete.
1616 return true;
1617 }
1618 if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() &&
1619 ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) {
1620 // Interface method defined in multiple super interfaces
1621 return true;
1622 }
1623 }
1624 }
1625 ResourceMark rm;
1626 tty->print_cr("Dependency method not found in the associated context:");
1627 tty->print_cr(" context = %s", ctxk->external_name());
1628 tty->print( " method = "); m->print_short_name(tty); tty->cr();
1629 if (lm != nullptr) {
1630 tty->print( " found = "); lm->print_short_name(tty); tty->cr();
1631 }
1632 return false;
1633 }
1634 #endif // ASSERT
1635
1636 bool Dependencies::is_concrete_klass(Klass* k) {
1637 if (k->is_abstract()) return false;
1638 // %%% We could treat classes which are concrete but
1639 // have not yet been instantiated as virtually abstract.
1640 // This would require a deoptimization barrier on first instantiation.
1641 //if (k->is_not_instantiated()) return false;
1642 return true;
1643 }
1644
1645 bool Dependencies::is_concrete_method(Method* m, Klass* k) {
1646 // nullptr is not a concrete method.
1647 if (m == nullptr) {
1648 return false;
1649 }
1650 // Statics are irrelevant to virtual call sites.
1651 if (m->is_static()) {
1652 return false;
1653 }
1654 // Abstract methods are not concrete.
1655 if (m->is_abstract()) {
1656 return false;
1657 }
1658 // Overpass (error) methods are not concrete if k is abstract.
1659 if (m->is_overpass() && k != nullptr) {
1660 return !k->is_abstract();
1661 }
1662 // Note: "true" is the conservative answer. When k == nullptr the overpass clause
1663 // above is skipped, so we return true whenever the answer would depend on it.
1664 return true;
1665 }
1666
1667 Klass* Dependencies::find_finalizable_subclass(InstanceKlass* ik) {
1668 for (ClassHierarchyIterator iter(ik); !iter.done(); iter.next()) {
1669 Klass* sub = iter.klass();
1670 if (sub->has_finalizer() && !sub->is_interface()) {
1671 return sub;
1672 }
1673 }
1674 return nullptr; // not found
1675 }
1676
1677 bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1678 if (k->is_abstract()) return false;
1679 // We could also return false if k does not yet appear to be
1680 // instantiated, if the VM version supports this distinction also.
1681 //if (k->is_not_instantiated()) return false;
1682 return true;
1683 }
1684
1685 bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1686 return k->has_finalizable_subclass();
1687 }
1688
1689 // Any use of the contents (bytecodes) of a method must be
1690 // marked by an "evol_method" dependency, if those contents
1691 // can change. (Note: A method is always dependent on itself.)
1692 Klass* Dependencies::check_evol_method(Method* m) {
1693 assert(must_be_in_vm(), "raw oops here");
1694 // Did somebody do a JVMTI RedefineClasses while our backs were turned?
1695 // Or is there now a breakpoint?
1696 // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1697 if (m->is_old()
1698 || m->number_of_breakpoints() > 0) {
1699 return m->method_holder();
1700 } else {
1701 return nullptr;
1702 }
1703 }
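// Illustration for check_evol_method() above (hypothetical Java types, not part of this file):
//   class Foo { int bar() { return 1; } }
// If a compiled method inlined Foo.bar() under an evol_method dependency and a JVMTI
// agent later redefines Foo, or a debugger sets a breakpoint in bar(), the old Method*
// becomes stale (is_old() or breakpoints > 0) and Foo is reported as the witness.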
1704
1705 // This is a strong assertion: It is that the given type
1706 // has no subtypes whatever. It is most useful for
1707 // optimizing checks on reflected types or on array types.
1708 // (Checks on types which are derived from real instances
1709 // can be optimized more strongly than this, because we
1710 // know that the checked type comes from a concrete type,
1711 // and therefore we can disregard abstract types.)
1712 Klass* Dependencies::check_leaf_type(InstanceKlass* ctxk) {
1713 assert(must_be_in_vm(), "raw oops here");
1714 assert_locked_or_safepoint(Compile_lock);
1715 Klass* sub = ctxk->subklass();
1716 if (sub != nullptr) {
1717 return sub;
1718 } else if (ctxk->nof_implementors() != 0) {
1719 // if it is an interface, it must be unimplemented
1720 // (if it is not an interface, nof_implementors is always zero)
1721 InstanceKlass* impl = ctxk->implementor();
1722 assert(impl != nullptr, "must be set");
1723 return impl;
1724 } else {
1725 return nullptr;
1726 }
1727 }
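// Illustration for check_leaf_type() above (hypothetical Java types, not part of this file):
//   class Leaf { }              // no subclasses yet: the leaf_type assertion holds
//   class Sub extends Leaf { }  // loading Sub makes Sub the witness
// For an interface context, gaining a first implementor plays the same role as
// gaining a first subclass.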
1728
1729 // Test the assertion that conck is the only concrete subtype* of ctxk.
1730 // The type conck itself is allowed to have further concrete subtypes.
1731 // This allows the compiler to narrow occurrences of ctxk by conck,
1732 // when dealing with the types of actual instances.
1733 Klass* Dependencies::check_abstract_with_unique_concrete_subtype(InstanceKlass* ctxk,
1734 Klass* conck,
1735 NewKlassDepChange* changes) {
1736 ConcreteSubtypeFinder wf(conck);
1737 Klass* k = wf.find_witness(ctxk, changes);
1738 return k;
1739 }
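// Illustration for check_abstract_with_unique_concrete_subtype() above
// (hypothetical Java types, not part of this file):
//   abstract class A { }
//   class B extends A { }   // conck: the unique concrete subtype of A
//   class C extends A { }   // loading C yields C as the witness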
1740
1741
1742 // Find the unique concrete proper subtype of ctxk, or nullptr if there
1743 // is more than one concrete proper subtype. If there are no concrete
1744 // proper subtypes, return ctxk itself, whether it is concrete or not.
1745 // The returned subtype is allowed to have further concrete subtypes.
1746 // That is, return CC1 for CX > CC1 > CC2, but nullptr for CX > { CC1, CC2 }.
1747 Klass* Dependencies::find_unique_concrete_subtype(InstanceKlass* ctxk) {
1748 ConcreteSubtypeFinder wf(ctxk); // Ignore ctxk when walking.
1749 wf.record_witnesses(1); // Record one other witness when walking.
1750 Klass* wit = wf.find_witness(ctxk);
1751 if (wit != nullptr) return nullptr; // Too many witnesses.
1752 Klass* conck = wf.participant(0);
1753 if (conck == nullptr) {
1754 return ctxk; // Return ctxk as a flag for "no subtypes".
1755 } else {
1756 #ifndef PRODUCT
1757 // Make sure the dependency mechanism will pass this discovery:
1758 if (VerifyDependencies) {
1759 // Turn off dependency tracing while actually testing deps.
1760 FlagSetting fs(_verify_in_progress, true);
1761 if (!Dependencies::is_concrete_klass(ctxk)) {
1762 guarantee(nullptr == (void *)
1763 check_abstract_with_unique_concrete_subtype(ctxk, conck),
1764 "verify dep.");
1765 }
1766 }
1767 #endif //PRODUCT
1768 return conck;
1769 }
1770 }
1771
1772 // Try to determine whether the root method in some context is concrete, based on information about the unique method
1773 // in that context. It exploits the fact that a concrete root method is always inherited into the context when there is a unique method.
1774 // Hence, the unique method's holder is always a supertype of the context class when the root method is concrete.
1775 // Examples for concrete_root_method
1776 // C (C.m uniqm)
1777 // |
1778 // CX (ctxk) uniqm is inherited into context.
1779 //
1780 // CX (ctxk) (CX.m uniqm) here uniqm is defined in ctxk.
1781 // Examples for !concrete_root_method
1782 // CX (ctxk)
1783 // |
1784 // C (C.m uniqm) uniqm is in subtype of ctxk.
1785 bool Dependencies::is_concrete_root_method(Method* uniqm, InstanceKlass* ctxk) {
1786 if (uniqm == nullptr) {
1787 return false; // match Dependencies::is_concrete_method() behavior
1788 }
1789 // Theoretically, the "direction" of the subtype check matters here.
1790 // On one hand, for an interface context with a single implementor, uniqm can live in a superclass of the implementor which
1791 // is not related to the context class.
1792 // On the other hand, uniqm could come from an interface unrelated to the context class, but right now that is not possible:
1793 // it is required that uniqm->method_holder() is the participant (uniqm->method_holder() <: ctxk), hence a default method
1794 // can't be used as the unique method.
1795 if (ctxk->is_interface()) {
1796 InstanceKlass* implementor = ctxk->implementor();
1797 assert(implementor != ctxk, "single implementor only"); // should have been invalidated earlier
1798 ctxk = implementor;
1799 }
1800 InstanceKlass* holder = uniqm->method_holder();
1801 assert(!holder->is_interface(), "no default methods allowed");
1802 assert(ctxk->is_subclass_of(holder) || holder->is_subclass_of(ctxk), "not related");
1803 return ctxk->is_subclass_of(holder);
1804 }
1805
1806 // If a class (or interface) has a unique concrete method uniqm, return nullptr.
1807 // Otherwise, return a class that contains an interfering method.
1808 Klass* Dependencies::check_unique_concrete_method(InstanceKlass* ctxk,
1809 Method* uniqm,
1810 NewKlassDepChange* changes) {
1811 ConcreteMethodFinder wf(uniqm, uniqm->method_holder());
1812 Klass* k = wf.find_witness(ctxk, changes);
1813 if (k != nullptr) {
1814 return k;
1815 }
1816 if (!Dependencies::is_concrete_root_method(uniqm, ctxk) || changes != nullptr) {
1817 Klass* conck = find_witness_AME(ctxk, uniqm, changes);
1818 if (conck != nullptr) {
1819 // Found a concrete subtype 'conck' which does not override abstract root method.
1820 return conck;
1821 }
1822 }
1823 return nullptr;
1824 }
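// Illustration for check_unique_concrete_method() above (hypothetical Java types, not part of this file):
//   abstract class A { abstract void m(); }
//   class B extends A { void m() { } }   // uniqm = B.m
//   class C extends A { void m() { } }   // loading C introduces an interfering method: C is the witness
// The find_witness_AME() path additionally catches a new concrete subtype that fails to
// provide a concrete m() (e.g. one compiled against an older version of A), which would
// throw AbstractMethodError instead of invoking uniqm.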
1825
1826 Klass* Dependencies::check_unique_implementor(InstanceKlass* ctxk, Klass* uniqk, NewKlassDepChange* changes) {
1827 assert(ctxk->is_interface(), "sanity");
1828 assert(ctxk->nof_implementors() > 0, "no implementors");
1829 if (ctxk->nof_implementors() == 1) {
1830 assert(ctxk->implementor() == uniqk, "sanity");
1831 return nullptr;
1832 }
1833 return ctxk; // no unique implementor
1834 }
1835
1836 // Search for AME (AbstractMethodError).
1837 // There are two versions of the check.
1838 // 1) Spot-checking version (class load time). The newly added class is checked for AME:
1839 // does it inherit or declare an abstract/overpass method while being concrete itself?
1840 // 2) Compile-time analysis for an abstract/overpass (abstract klass) root_m. The non-uniqm subtrees are checked for concrete classes.
1841 Klass* Dependencies::find_witness_AME(InstanceKlass* ctxk, Method* m, KlassDepChange* changes) {
1842 if (m != nullptr) {
1843 if (changes != nullptr) {
1844 // Spot checking version.
1845 ConcreteMethodFinder wf(m);
1846 Klass* new_type = changes->as_new_klass_change()->new_type();
1847 if (wf.witnessed_reabstraction_in_supers(new_type)) {
1848 return new_type;
1849 }
1850 } else {
1851 // Note: It is required that uniqm->method_holder() is the participant (see ClassHierarchyWalker::found_method()).
1852 ConcreteSubtypeFinder wf(m->method_holder());
1853 Klass* conck = wf.find_witness(ctxk);
1854 if (conck != nullptr) {
1855 Method* cm = InstanceKlass::cast(conck)->find_instance_method(m->name(), m->signature(), Klass::PrivateLookupMode::skip);
1856 if (!Dependencies::is_concrete_method(cm, conck)) {
1857 return conck;
1858 }
1859 }
1860 }
1861 }
1862 return nullptr;
1863 }
1864
1865 // This function is used by the non-vtable-based find_unique_concrete_method()
1866 // to check whether a subtype method overrides the base method.
1867 static bool overrides(Method* sub_m, Method* base_m) {
1868 assert(base_m != nullptr, "base method should be non null");
1869 if (sub_m == nullptr) {
1870 return false;
1871 }
1872 /**
1873 * If base_m is public or protected then sub_m always overrides it.
1874 * If base_m is !public, !protected and !private (i.e. base_m is package-private),
1875 * then sub_m must be in the same package as base_m.
1876 * For package-private base_m this is a conservative approach: it allows only a subset of the cases
1877 * permitted by the JVM specification.
1878 **/
1879 if (base_m->is_public() || base_m->is_protected() ||
1880 base_m->method_holder()->is_same_class_package(sub_m->method_holder())) {
1881 return true;
1882 }
1883 return false;
1884 }
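// Illustration for overrides() above (hypothetical Java types, not part of this file):
//   package p;  public class A            {        void m() { } }   // package-private base_m
//   package q;  public class B extends A  { public void m() { } }
// B.m does not override the package-private A.m because the holders live in different
// packages, so overrides() returns false; with both classes in package p it returns true.
// A public or protected base_m is always treated as overridden.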
1885
1886 // Find the set of all non-abstract methods under ctxk that match m.
1887 // (The method m must be defined or inherited in ctxk.)
1888 // Include m itself in the set, unless it is abstract.
1889 // If this set has exactly one element, return that element.
1890 Method* Dependencies::find_unique_concrete_method(InstanceKlass* ctxk, Method* m, Klass** participant) {
1891 // Return nullptr if m is marked old; must have been a redefined method.
1892 if (m->is_old()) {
1893 return nullptr;
1894 }
1895 if (m->is_default_method()) {
1896 return nullptr; // not supported
1897 }
1898 assert(verify_method_context(ctxk, m), "proper context");
1899 ConcreteMethodFinder wf(m);
1900 wf.record_witnesses(1);
1901 Klass* wit = wf.find_witness(ctxk);
1902 if (wit != nullptr) return nullptr; // Too many witnesses.
1903 Method* fm = wf.found_method(0); // Will be nullptr if num_parts == 0.
1904 if (participant != nullptr) {
1905 (*participant) = wf.participant(0);
1906 }
1907 if (!Dependencies::is_concrete_method(fm, nullptr)) {
1908 fm = nullptr; // ignore abstract methods
1909 }
1910 if (Dependencies::is_concrete_method(m, ctxk)) {
1911 if (fm == nullptr) {
1912 // It turns out that m was always the only implementation.
1913 fm = m;
1914 } else if (fm != m) {
1915 // Two conflicting implementations after all.
1916 // (This can happen if m is inherited into ctxk and fm overrides it.)
1917 return nullptr;
1918 }
1919 } else if (Dependencies::find_witness_AME(ctxk, fm) != nullptr) {
1920 // Found a concrete subtype which does not override abstract root method.
1921 return nullptr;
1922 } else if (!overrides(fm, m)) {
1923 // Found method doesn't override abstract root method.
1924 return nullptr;
1925 }
1926 assert(Dependencies::is_concrete_root_method(fm, ctxk) == Dependencies::is_concrete_method(m, ctxk), "mismatch");
1927 #ifndef PRODUCT
1928 // Make sure the dependency mechanism will pass this discovery:
1929 if (VerifyDependencies && fm != nullptr) {
1930 guarantee(nullptr == (void *)check_unique_concrete_method(ctxk, fm),
1931 "verify dep.");
1932 }
1933 #endif //PRODUCT
1934 return fm;
1935 }
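// Illustration for find_unique_concrete_method() above (hypothetical Java types, not part of this file):
//   abstract class A { abstract void m(); }
//   class B extends A { void m() { } }
// find_unique_concrete_method(A, A.m) returns B.m and reports B through the participant
// out-parameter, which lets the compiler devirtualize a.m() to B.m under a
// unique_concrete_method dependency.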
1936
1937 // If a class (or interface) has a unique concrete method uniqm, return nullptr.
1938 // Otherwise, return a class that contains an interfering method.
1939 Klass* Dependencies::check_unique_concrete_method(InstanceKlass* ctxk,
1940 Method* uniqm,
1941 Klass* resolved_klass,
1942 Method* resolved_method,
1943 KlassDepChange* changes) {
1944 assert(!ctxk->is_interface() || ctxk == resolved_klass, "sanity");
1945 assert(!resolved_method->can_be_statically_bound() || resolved_method == uniqm, "sanity");
1946 assert(resolved_klass->is_subtype_of(resolved_method->method_holder()), "sanity");
1947
1948 if (!InstanceKlass::cast(resolved_klass)->is_linked() ||
1949 !resolved_method->method_holder()->is_linked() ||
1950 resolved_method->can_be_statically_bound()) {
1951 // Dependency is redundant, but benign. Just keep it to avoid unnecessary recompilation.
1952 return nullptr; // no vtable index available
1953 }
1954
1955 LinkedConcreteMethodFinder mf(InstanceKlass::cast(resolved_klass), resolved_method, uniqm);
1956 return mf.find_witness(ctxk, changes);
1957 }
1958
1959 // Find the set of all non-abstract methods under ctxk that match m.
1960 // (The method m must be defined or inherited in ctxk.)
1961 // Include m itself in the set, unless it is abstract.
1962 // If this set has exactly one element, return that element.
1963 // Not yet linked subclasses of ctxk are ignored since they don't have any instances yet.
1964 // Additionally, resolved_klass and resolved_method complete the description of the call site being analyzed.
1965 Method* Dependencies::find_unique_concrete_method(InstanceKlass* ctxk, Method* m, Klass* resolved_klass, Method* resolved_method) {
1966 // Return nullptr if m is marked old; must have been a redefined method.
1967 if (m->is_old()) {
1968 return nullptr;
1969 }
1970 if (!InstanceKlass::cast(resolved_klass)->is_linked() ||
1971 !resolved_method->method_holder()->is_linked() ||
1972 resolved_method->can_be_statically_bound()) {
1973 return m; // nothing to do: no witness under ctxk
1974 }
1975 LinkedConcreteMethodFinder wf(InstanceKlass::cast(resolved_klass), resolved_method);
1976 assert(Dependencies::verify_method_context(ctxk, m), "proper context");
1977 wf.record_witnesses(1);
1978 Klass* wit = wf.find_witness(ctxk);
1979 if (wit != nullptr) {
1980 return nullptr; // Too many witnesses.
1981 }
1982 // p == nullptr when no participants are found (wf.num_participants() == 0).
1983 // fm == nullptr case has 2 meanings:
1984 // * when p == nullptr: no method found;
1985 // * when p != nullptr: AbstractMethodError-throwing method found.
1986 // Also, found method should always be accompanied by a participant class.
1987 Klass* p = wf.participant(0);
1988 Method* fm = wf.found_method(0);
1989 assert(fm == nullptr || p != nullptr, "no participant");
1990 // Normalize all error-throwing cases to nullptr.
1991 if (fm == Universe::throw_illegal_access_error() ||
1992 fm == Universe::throw_no_such_method_error() ||
1993 !Dependencies::is_concrete_method(fm, p)) {
1994 fm = nullptr; // error-throwing method
1995 }
1996 if (Dependencies::is_concrete_method(m, ctxk)) {
1997 if (p == nullptr) {
1998 // It turns out that m was always the only implementation.
1999 assert(fm == nullptr, "sanity");
2000 fm = m;
2001 }
2002 }
2003 #ifndef PRODUCT
2004 // Make sure the dependency mechanism will pass this discovery:
2005 if (VerifyDependencies && fm != nullptr) {
2006 guarantee(nullptr == check_unique_concrete_method(ctxk, fm, resolved_klass, resolved_method),
2007 "verify dep.");
2008 }
2009 #endif // PRODUCT
2010 assert(fm == nullptr || !fm->is_abstract(), "sanity");
2011 // Old CHA conservatively reports concrete methods in abstract classes
2012 // irrespective of whether they have concrete subclasses or not.
2013 // Also, abstract root method case is not fully supported.
2014 #ifdef ASSERT
2015 Klass* uniqp = nullptr;
2016 Method* uniqm = Dependencies::find_unique_concrete_method(ctxk, m, &uniqp);
2017 assert(uniqm == nullptr || uniqm == fm ||
2018 m->is_abstract() ||
2019 uniqm->method_holder()->is_abstract() ||
2020 (fm == nullptr && uniqm != nullptr && uniqp != nullptr && !InstanceKlass::cast(uniqp)->is_linked()),
2021 "sanity");
2022 #endif // ASSERT
2023 return fm;
2024 }
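// Illustration for the linkage-aware find_unique_concrete_method() above
// (hypothetical Java types, not part of this file):
//   abstract class A { abstract void m(); }
//   class B extends A { void m() { } }   // linked
//   class C extends A { void m() { } }   // loaded but not yet linked
// Since C cannot have instances before it is linked, B.m is still reported as unique;
// when C is later initialized, the resulting KlassInitDepChange re-checks dependencies
// of this kind (see check_klass_init_dependency()).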
2025
2026 Klass* Dependencies::check_has_no_finalizable_subclasses(InstanceKlass* ctxk, NewKlassDepChange* changes) {
2027 InstanceKlass* search_at = ctxk;
2028 if (changes != nullptr) {
2029 search_at = changes->new_type(); // just look at the new bit
2030 }
2031 return find_finalizable_subclass(search_at);
2032 }
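// Illustration for check_has_no_finalizable_subclasses() above (hypothetical Java types, not part of this file):
//   class A { }
//   class B extends A { protected void finalize() { } }
// A no_finalizable_subclasses dependency on A lets compiled code assume that no type in
// A's hierarchy needs finalizer registration; loading B makes B the witness returned by
// find_finalizable_subclass().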
2033
2034 Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
2035 assert(call_site != nullptr, "sanity");
2036 assert(method_handle != nullptr, "sanity");
2037 assert(call_site->is_a(vmClasses::CallSite_klass()), "sanity");
2038
2039 if (changes == nullptr) {
2040 // Validate all CallSites
2041 if (java_lang_invoke_CallSite::target(call_site) != method_handle)
2042 return call_site->klass(); // assertion failed
2043 } else {
2044 // Validate the given CallSite
2045 if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
2046 assert(method_handle != changes->method_handle(), "must be");
2047 return call_site->klass(); // assertion failed
2048 }
2049 }
2050 return nullptr; // assertion still valid
2051 }
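// Illustration for check_call_site_target_value() above (hypothetical use of
// java.lang.invoke, not part of this file):
//   MethodHandle one = MethodHandles.constant(int.class, 1);
//   MutableCallSite cs = new MutableCallSite(one);   // compiled code may embed 'one'
//   cs.setTarget(MethodHandles.constant(int.class, 2));
// The setTarget call produces a CallSiteDepChange for this call site; since the handle
// embedded in compiled code ('one') is no longer the call site's target, the check above
// returns the CallSite's klass as the witness.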
2052
2053 void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
2054 if (_verify_in_progress) return; // don't log
2055 if (witness != nullptr) {
2056 LogTarget(Debug, dependencies) lt;
2057 if (lt.is_enabled()) {
2058 LogStream ls(lt);
2059 print_dependency(&ls, witness, /*verbose=*/ true);
2060 }
2061 // The following is a no-op unless logging is enabled:
2062 log_dependency(witness);
2063 }
2064 }
2065
2066 Klass* Dependencies::DepStream::check_new_klass_dependency(NewKlassDepChange* changes) {
2067 assert_locked_or_safepoint(Compile_lock);
2068 Dependencies::check_valid_dependency_type(type());
2069
2070 Klass* witness = nullptr;
2071 switch (type()) {
2072 case evol_method:
2073 witness = check_evol_method(method_argument(0));
2074 break;
2075 case leaf_type:
2076 witness = check_leaf_type(context_type());
2077 break;
2078 case abstract_with_unique_concrete_subtype:
2079 witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
2080 break;
2081 case unique_concrete_method_2:
2082 witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
2083 break;
2084 case unique_concrete_method_4:
2085 witness = check_unique_concrete_method(context_type(), method_argument(1), type_argument(2), method_argument(3), changes);
2086 break;
2087 case unique_implementor:
2088 witness = check_unique_implementor(context_type(), type_argument(1), changes);
2089 break;
2090 case no_finalizable_subclasses:
2091 witness = check_has_no_finalizable_subclasses(context_type(), changes);
2092 break;
2093 default:
2094 witness = nullptr;
2095 break;
2096 }
2097 trace_and_log_witness(witness);
2098 return witness;
2099 }
2100
2101 Klass* Dependencies::DepStream::check_klass_init_dependency(KlassInitDepChange* changes) {
2102 assert_locked_or_safepoint(Compile_lock);
2103 Dependencies::check_valid_dependency_type(type());
2104
2105 // No new types added. Only unique_concrete_method_4 is sensitive to class initialization changes.
2106 Klass* witness = nullptr;
2107 switch (type()) {
2108 case unique_concrete_method_4:
2109 witness = check_unique_concrete_method(context_type(), method_argument(1), type_argument(2), method_argument(3), changes);
2110 break;
2111 default:
2112 witness = nullptr;
2113 break;
2114 }
2115 trace_and_log_witness(witness);
2116 return witness;
2117 }
2118
2119 Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
2120 assert_locked_or_safepoint(Compile_lock);
2121 Dependencies::check_valid_dependency_type(type());
2122
2123 if (changes != nullptr) {
2124 if (changes->is_klass_init_change()) {
2125 return check_klass_init_dependency(changes->as_klass_init_change());
2126 } else {
2127 return check_new_klass_dependency(changes->as_new_klass_change());
2128 }
2129 } else {
2130 Klass* witness = check_new_klass_dependency(nullptr);
2131 // check_klass_init_dependency duplicates check_new_klass_dependency checks when class hierarchy change info is absent.
2132 assert(witness != nullptr || check_klass_init_dependency(nullptr) == nullptr, "missed dependency");
2133 return witness;
2134 }
2135 }
2136
2137 Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
2138 assert_locked_or_safepoint(Compile_lock);
2139 Dependencies::check_valid_dependency_type(type());
2140
2141 Klass* witness = nullptr;
2142 switch (type()) {
2143 case call_site_target_value:
2144 witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
2145 break;
2146 default:
2147 witness = nullptr;
2148 break;
2149 }
2150 trace_and_log_witness(witness);
2151 return witness;
2152 }
2153
2154
2155 Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
2156 // Handle klass dependency
2157 if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
2158 return check_klass_dependency(changes.as_klass_change());
2159
2160 // Handle CallSite dependency
2161 if (changes.is_call_site_change())
2162 return check_call_site_dependency(changes.as_call_site_change());
2163
2164 // irrelevant dependency; skip it
2165 return nullptr;
2166 }
2167
2168
2169 void DepChange::print() { print_on(tty); }
2170
2171 void DepChange::print_on(outputStream* st) {
2172 int nsup = 0, nint = 0;
2173 for (ContextStream str(*this); str.next(); ) {
2174 InstanceKlass* k = str.klass();
2175 switch (str.change_type()) {
2176 case Change_new_type:
2177 st->print_cr(" dependee = %s", k->external_name());
2178 break;
2179 case Change_new_sub:
2180 if (!WizardMode) {
2181 ++nsup;
2182 } else {
2183 st->print_cr(" context super = %s", k->external_name());
2184 }
2185 break;
2186 case Change_new_impl:
2187 if (!WizardMode) {
2188 ++nint;
2189 } else {
2190 st->print_cr(" context interface = %s", k->external_name());
2191 }
2192 break;
2193 default:
2194 break;
2195 }
2196 }
2197 if (nsup + nint != 0) {
2198 st->print_cr(" context supers = %d, interfaces = %d", nsup, nint);
2199 }
2200 }
2201
2202 void DepChange::ContextStream::start() {
2203 InstanceKlass* type = (_changes.is_klass_change() ? _changes.as_klass_change()->type() : (InstanceKlass*) nullptr);
2204 _change_type = (type == nullptr ? NO_CHANGE : Start_Klass);
2205 _klass = type;
2206 _ti_base = nullptr;
2207 _ti_index = 0;
2208 _ti_limit = 0;
2209 }
2210
2211 bool DepChange::ContextStream::next() {
2212 switch (_change_type) {
2213 case Start_Klass: // initial state; _klass is the new type
2214 _ti_base = _klass->transitive_interfaces();
2215 _ti_index = 0;
2216 _change_type = Change_new_type;
2217 return true;
2218 case Change_new_type:
2219 // fall through:
2220 _change_type = Change_new_sub;
2221 case Change_new_sub:
2222 // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
2223 {
2224 _klass = _klass->java_super();
2225 if (_klass != nullptr) {
2226 return true;
2227 }
2228 }
2229 // else set up _ti_limit and fall through:
2230 _ti_limit = (_ti_base == nullptr) ? 0 : _ti_base->length();
2231 _change_type = Change_new_impl;
2232 case Change_new_impl:
2233 if (_ti_index < _ti_limit) {
2234 _klass = _ti_base->at(_ti_index++);
2235 return true;
2236 }
2237 // fall through:
2238 _change_type = NO_CHANGE; // iterator is exhausted
2239 case NO_CHANGE:
2240 break;
2241 default:
2242 ShouldNotReachHere();
2243 }
2244 return false;
2245 }
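// Illustration for ContextStream::next() above (hypothetical Java types, not part of this file):
//   class C extends B implements I { }
// For a newly loaded C, the stream yields C as Change_new_type, then B and the rest of the
// java_super chain up to Object as Change_new_sub, and finally C's transitive interfaces
// (I and any super-interfaces) as Change_new_impl.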
2246
2247 void KlassDepChange::initialize() {
2248 // entire transaction must be under this lock:
2249 assert_lock_strong(Compile_lock);
2250
2251 // Mark the dependee and all its superclasses,
2252 // as well as its transitive interfaces.
2253 for (ContextStream str(*this); str.next(); ) {
2254 InstanceKlass* d = str.klass();
2255 assert(!d->is_marked_dependent(), "checking");
2256 d->set_is_marked_dependent(true);
2257 }
2258 }
2259
2260 KlassDepChange::~KlassDepChange() {
2261 // Unmark the dependee and all its superclasses,
2262 // as well as its transitive interfaces.
2263 for (ContextStream str(*this); str.next(); ) {
2264 InstanceKlass* d = str.klass();
2265 d->set_is_marked_dependent(false);
2266 }
2267 }
2268
2269 bool KlassDepChange::involves_context(Klass* k) {
2270 if (k == nullptr || !k->is_instance_klass()) {
2271 return false;
2272 }
2273 InstanceKlass* ik = InstanceKlass::cast(k);
2274 bool is_contained = ik->is_marked_dependent();
2275 assert(is_contained == type()->is_subtype_of(k),
2276 "correct marking of potential context types");
2277 return is_contained;
2278 }
2279
2280 void Dependencies::print_statistics() {
2281 AbstractClassHierarchyWalker::print_statistics();
2282 }
2283
2284 void AbstractClassHierarchyWalker::print_statistics() {
2285 if (UsePerfData) {
2286 jlong deps_find_witness_calls = _perf_find_witness_anywhere_calls_count->get_value();
2287 jlong deps_find_witness_steps = _perf_find_witness_anywhere_steps_count->get_value();
2288 jlong deps_find_witness_singles = _perf_find_witness_in_calls_count->get_value();
2289
2290 ttyLocker ttyl;
2291 tty->print_cr("Dependency check (find_witness) "
2292 "calls=" JLONG_FORMAT ", steps=" JLONG_FORMAT " (avg=%.1f), singles=" JLONG_FORMAT,
2293 deps_find_witness_calls,
2294 deps_find_witness_steps,
2295 (double)deps_find_witness_steps / deps_find_witness_calls,
2296 deps_find_witness_singles);
2297 if (xtty != nullptr) {
2298 xtty->elem("deps_find_witness calls='" JLONG_FORMAT "' steps='" JLONG_FORMAT "' singles='" JLONG_FORMAT "'",
2299 deps_find_witness_calls,
2300 deps_find_witness_steps,
2301 deps_find_witness_singles);
2302 }
2303 }
2304 }
2305
2306 CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) :
2307 _call_site(call_site),
2308 _method_handle(method_handle) {
2309 assert(_call_site()->is_a(vmClasses::CallSite_klass()), "must be");
2310 assert(_method_handle.is_null() || _method_handle()->is_a(vmClasses::MethodHandle_klass()), "must be");
2311 }
2312
2313 void dependencies_init() {
2314 AbstractClassHierarchyWalker::init();
2315 }