6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "ci/ciMethodData.hpp"
27 #include "classfile/vmSymbols.hpp"
28 #include "compiler/compilationPolicy.hpp"
29 #include "compiler/compilerDefinitions.inline.hpp"
30 #include "compiler/compilerOracle.hpp"
31 #include "interpreter/bytecode.hpp"
32 #include "interpreter/bytecodeStream.hpp"
33 #include "interpreter/linkResolver.hpp"
34 #include "memory/metaspaceClosure.hpp"
35 #include "memory/resourceArea.hpp"
36 #include "oops/klass.inline.hpp"
37 #include "oops/methodData.inline.hpp"
38 #include "prims/jvmtiRedefineClasses.hpp"
39 #include "runtime/atomic.hpp"
40 #include "runtime/deoptimization.hpp"
41 #include "runtime/handles.inline.hpp"
42 #include "runtime/orderAccess.hpp"
43 #include "runtime/safepointVerifiers.hpp"
44 #include "runtime/signature.hpp"
45 #include "utilities/align.hpp"
301 #ifdef ASSERT
302 ResourceMark rm;
303 ReferenceArgumentCount rac(inv.signature());
304 int count = MIN2(rac.count(), (int)TypeProfileArgsLimit);
305 assert(count > 0, "room for args type but none found?");
306 check_number_of_arguments(count);
307 #endif
308 _args.post_initialize(inv.signature(), inv.has_receiver(), false);
309 }
310
311 if (has_return()) {
312 assert(is_reference_type(inv.result_type()), "room for a ret type but doesn't return obj?");
313 _ret.post_initialize();
314 }
315 }
316
317 void TypeStackSlotEntries::clean_weak_klass_links(bool always_clean) {
318 for (int i = 0; i < _number_of_entries; i++) {
319 intptr_t p = type(i);
320 Klass* k = (Klass*)klass_part(p);
321 if (k != nullptr && (always_clean || !k->is_loader_alive())) {
322 set_type(i, with_status((Klass*)nullptr, p));
323 }
324 }
325 }
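// Illustration: a profiled type cell packs a Klass* together with low-order
// status bits; klass_part() strips the status, with_status() re-applies it,
// and was_null_seen() reads one of the status bits. Reading and rewriting a
// single entry (as done above) therefore looks like:
//
//   intptr_t p = type(i);                    // raw cell: Klass* plus status bits
//   Klass*   k = (Klass*)klass_part(p);      // pointer with status stripped
//   bool  seen_null = was_null_seen(p);      // status survives the rewrite
//   set_type(i, with_status(k, p));          // repack, preserving the status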
326
327 void ReturnTypeEntry::clean_weak_klass_links(bool always_clean) {
328 intptr_t p = type();
329 Klass* k = (Klass*)klass_part(p);
330 if (k != nullptr && (always_clean || !k->is_loader_alive())) {
331 set_type(with_status((Klass*)nullptr, p));
332 }
333 }
334
335 bool TypeEntriesAtCall::return_profiling_enabled() {
336 return MethodData::profile_return();
337 }
338
339 bool TypeEntriesAtCall::arguments_profiling_enabled() {
340 return MethodData::profile_arguments();
341 }
342
343 void TypeEntries::print_klass(outputStream* st, intptr_t k) {
344 if (is_type_none(k)) {
345 st->print("none");
346 } else if (is_type_unknown(k)) {
347 st->print("unknown");
348 } else {
349 valid_klass(k)->print_value_on(st);
350 }
351 if (was_null_seen(k)) {
352 st->print(" (null seen)");
353 }
354 }
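// For illustration, the printed forms are (class name format as produced by
// print_value_on(), e.g. a fully qualified name):
//
//   "none"                          -- no type recorded yet
//   "unknown"                       -- the profile could not be narrowed to one type
//   "java.lang.String (null seen)"  -- a single recorded type, and a null was observed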
390 _args.print_data_on(st);
391 }
392 if (has_return()) {
393 tab(st, true);
394 st->print("return type");
395 _ret.print_data_on(st);
396 }
397 }
398
399 // ==================================================================
400 // ReceiverTypeData
401 //
402 // A ReceiverTypeData is used to access profiling information about a
403 // dynamic type check. It consists of a counter that records the total number
404 // of times the check is reached, and a series of (Klass*, count) pairs
405 // that store a type profile for the receiver of the check.
406
407 void ReceiverTypeData::clean_weak_klass_links(bool always_clean) {
408 for (uint row = 0; row < row_limit(); row++) {
409 Klass* p = receiver(row);
410 if (p != nullptr && (always_clean || !p->is_loader_alive())) {
411 clear_row(row);
412 }
413 }
414 }
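// Sketch (hypothetical helper, not an existing API): a consumer of this
// profile, such as an inlining heuristic, typically scans the rows for the
// most frequently observed receiver:
//
//   Klass* dominant_receiver(const ReceiverTypeData* data, uint* count_out) {
//     Klass* best = nullptr;
//     uint best_count = 0;
//     for (uint row = 0; row < data->row_limit(); row++) {
//       Klass* k = data->receiver(row);
//       if (k != nullptr && data->receiver_count(row) > best_count) {
//         best = k;
//         best_count = data->receiver_count(row);
//       }
//     }
//     if (count_out != nullptr) {
//       *count_out = best_count;
//     }
//     return best;
//   }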
415
416 void ReceiverTypeData::print_receiver_data_on(outputStream* st) const {
417 uint row;
418 int entries = 0;
419 for (row = 0; row < row_limit(); row++) {
420 if (receiver(row) != nullptr) entries++;
421 }
422 st->print_cr("count(%u) entries(%u)", count(), entries);
423 int total = count();
424 for (row = 0; row < row_limit(); row++) {
425 if (receiver(row) != nullptr) {
426 total += receiver_count(row);
427 }
428 }
429 for (row = 0; row < row_limit(); row++) {
430 if (receiver(row) != nullptr) {
431 tab(st);
432 receiver(row)->print_value_on(st);
433 st->print_cr("(%u %4.2f)", receiver_count(row), (float) receiver_count(row) / (float) total);
434 }
435 }
624 return obj_args + 1; // 1 cell for array len
625 }
626 return 0;
627 }
628
629 void ParametersTypeData::post_initialize(BytecodeStream* stream, MethodData* mdo) {
630 _parameters.post_initialize(mdo->method()->signature(), !mdo->method()->is_static(), true);
631 }
632
633 bool ParametersTypeData::profiling_enabled() {
634 return MethodData::profile_parameters();
635 }
636
637 void ParametersTypeData::print_data_on(outputStream* st, const char* extra) const {
638 print_shared(st, "ParametersTypeData", extra);
639 tab(st);
640 _parameters.print_data_on(st);
641 st->cr();
642 }
643
644 void SpeculativeTrapData::print_data_on(outputStream* st, const char* extra) const {
645 print_shared(st, "SpeculativeTrapData", extra);
646 tab(st);
647 method()->print_short_name(st);
648 st->cr();
649 }
650
651 // ==================================================================
652 // MethodData*
653 //
654 // A MethodData* holds information which has been collected about
655 // a method.
656
657 MethodData* MethodData::allocate(ClassLoaderData* loader_data, const methodHandle& method, TRAPS) {
658 assert(!THREAD->owns_locks(), "Should not own any locks");
659 int size = MethodData::compute_allocation_size_in_words(method);
660
661 return new (loader_data, size, MetaspaceObj::MethodDataType, THREAD)
662 MethodData(method);
663 }
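// Usage sketch (assumed call-site shape, not code from this file): a caller
// building an MDO for a Method* m supplies the holder's ClassLoaderData as the
// metaspace to allocate from:
//
//   methodHandle mh(THREAD, m);
//   ClassLoaderData* loader_data = m->method_holder()->class_loader_data();
//   MethodData* mdo = MethodData::allocate(loader_data, mh, CHECK_NULL);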
1201
1202 // Give each of the data entries a chance to perform specific
1203 // data initialization.
1204 void MethodData::post_initialize(BytecodeStream* stream) {
1205 ResourceMark rm;
1206 ProfileData* data;
1207 for (data = first_data(); is_valid(data); data = next_data(data)) {
1208 stream->set_start(data->bci());
1209 stream->next();
1210 data->post_initialize(stream, this);
1211 }
1212 if (_parameters_type_data_di != no_parameters) {
1213 parameters_type_data()->post_initialize(nullptr, this);
1214 }
1215 }
1216
1217 // Initialize the MethodData* corresponding to a given method.
1218 MethodData::MethodData(const methodHandle& method)
1219 : _method(method()),
1220 // Holds Compile_lock
1221 _extra_data_lock(Mutex::nosafepoint, "MDOExtraData_lock"),
1222 _compiler_counters(),
1223 _parameters_type_data_di(parameters_uninitialized) {
1224 initialize();
1225 }
1226
1227 void MethodData::initialize() {
1228 Thread* thread = Thread::current();
1229 NoSafepointVerifier no_safepoint; // init function atomic wrt GC
1230 ResourceMark rm(thread);
1231
1232 init();
1233 set_creation_mileage(mileage_of(method()));
1234
1235 // Go through the bytecodes and allocate and initialize the
1236 // corresponding data cells.
1237 int data_size = 0;
1238 int empty_bc_count = 0; // number of bytecodes lacking data
1239 _data[0] = 0; // apparently not set below.
1240 BytecodeStream stream(methodHandle(thread, method()));
1241 Bytecodes::Code c;
1242 bool needs_speculative_traps = false;
1243 while ((c = stream.next()) >= 0) {
1244 int size_in_bytes = initialize_data(&stream, data_size);
1326 _tenure_traps = 0;
1327 _num_loops = 0;
1328 _num_blocks = 0;
1329 _would_profile = unknown;
1330
1331 #if INCLUDE_JVMCI
1332 _jvmci_ir_size = 0;
1333 _failed_speculations = nullptr;
1334 #endif
1335
1336 // Initialize escape flags.
1337 clear_escape_info();
1338 }
1339
1340 // Get a measure of how much mileage the method has on it.
1341 int MethodData::mileage_of(Method* method) {
1342 return MAX2(method->invocation_count(), method->backedge_count());
1343 }
1344
1345 bool MethodData::is_mature() const {
1346 return CompilationPolicy::is_mature(_method);
1347 }
1348
1349 // Translate a bci to its corresponding data index (di).
1350 address MethodData::bci_to_dp(int bci) {
1351 ResourceMark rm;
1352 DataLayout* data = data_layout_before(bci);
1353 DataLayout* prev = nullptr;
1354 for ( ; is_valid(data); data = next_data_layout(data)) {
1355 if (data->bci() >= bci) {
1356 if (data->bci() == bci) set_hint_di(dp_to_di((address)data));
1357 else if (prev != nullptr) set_hint_di(dp_to_di((address)prev));
1358 return (address)data;
1359 }
1360 prev = data;
1361 }
1362 return (address)limit_data_position();
1363 }
1364
1365 // Translate a bci to its corresponding data, or null.
1366 ProfileData* MethodData::bci_to_data(int bci) {
1684 }
1685
1686 bool MethodData::profile_all_parameters() {
1687 return profile_parameters_flag() == type_profile_all;
1688 }
1689
1690 bool MethodData::profile_parameters_for_method(const methodHandle& m) {
1691 if (!profile_parameters()) {
1692 return false;
1693 }
1694
1695 if (profile_all_parameters()) {
1696 return true;
1697 }
1698
1699 assert(profile_parameters_jsr292_only(), "inconsistent");
1700 return m->is_compiled_lambda_form();
1701 }
1702
1703 void MethodData::metaspace_pointers_do(MetaspaceClosure* it) {
1704 log_trace(cds)("Iter(MethodData): %p", this);
1705 it->push(&_method);
1706 }
1707
1708 void MethodData::clean_extra_data_helper(DataLayout* dp, int shift, bool reset) {
1709 check_extra_data_locked();
1710
1711 if (shift == 0) {
1712 return;
1713 }
1714 if (!reset) {
1715 // Move all cells of trap entry at dp left by "shift" cells
1716 intptr_t* start = (intptr_t*)dp;
1717 intptr_t* end = (intptr_t*)next_extra(dp);
1718 for (intptr_t* ptr = start; ptr < end; ptr++) {
1719 *(ptr-shift) = *ptr;
1720 }
1721 } else {
1722 // Reset "shift" cells stopping at dp
1723 intptr_t* start = ((intptr_t*)dp) - shift;
1724 intptr_t* end = (intptr_t*)dp;
1725 for (intptr_t* ptr = start; ptr < end; ptr++) {
1726 *ptr = 0;
1727 }
1728 }
1729 }
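// Illustration of the two modes above: clean_extra_data() (below) accumulates
// in "shift" the number of cells occupied by dead SpeculativeTrapData entries
// seen so far. With reset == false a surviving entry is copied "shift" cells
// to the left, compacting the extra-data region; with reset == true the
// "shift" cells immediately before dp are zeroed, clearing the space vacated
// at the end of the compacted data.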
1730
1731 // Check for entries that reference an unloaded method
1732 class CleanExtraDataKlassClosure : public CleanExtraDataClosure {
1733 bool _always_clean;
1734 public:
1735 CleanExtraDataKlassClosure(bool always_clean) : _always_clean(always_clean) {}
1736 bool is_live(Method* m) {
1737 return !(_always_clean) && m->method_holder()->is_loader_alive();
1738 }
1739 };
1740
1741 // Check for entries that reference a redefined method
1742 class CleanExtraDataMethodClosure : public CleanExtraDataClosure {
1743 public:
1744 CleanExtraDataMethodClosure() {}
1745 bool is_live(Method* m) { return !m->is_old(); }
1746 };
1747
1748
1749 // Remove SpeculativeTrapData entries that reference an unloaded or
1750 // redefined method
1751 void MethodData::clean_extra_data(CleanExtraDataClosure* cl) {
1752 check_extra_data_locked();
1753
1754 DataLayout* dp = extra_data_base();
1755 DataLayout* end = args_data_limit();
1756
1757 int shift = 0;
1758 for (; dp < end; dp = next_extra(dp)) {
1759 switch(dp->tag()) {
1760 case DataLayout::speculative_trap_data_tag: {
1761 SpeculativeTrapData* data = new SpeculativeTrapData(dp);
1762 Method* m = data->method();
1763 assert(m != nullptr, "should have a method");
1764 if (!cl->is_live(m)) {
1765 // "shift" accumulates the number of cells for dead
1766 // SpeculativeTrapData entries that have been seen so
1767 // far. Following entries must be shifted left by that many
1848 ResourceMark rm;
1849 CleanExtraDataMethodClosure cl;
1850
1851 // Lock to modify extra data, and prevent Safepoint from breaking the lock
1852 MutexLocker ml(extra_data_lock(), Mutex::_no_safepoint_check_flag);
1853
1854 clean_extra_data(&cl);
1855 verify_extra_data_clean(&cl);
1856 }
1857
1858 void MethodData::deallocate_contents(ClassLoaderData* loader_data) {
1859 release_C_heap_structures();
1860 }
1861
1862 void MethodData::release_C_heap_structures() {
1863 #if INCLUDE_JVMCI
1864 FailedSpeculation::free_failed_speculations(get_failed_speculations_address());
1865 #endif
1866 }
1867
1868 #ifdef ASSERT
1869 void MethodData::check_extra_data_locked() const {
1870 // Cast away const just to be able to verify the lock.
1871 // Usually we only want non-const access to the lock,
1872 // so this is an exception.
1873 MethodData* self = (MethodData*)this;
1874 assert(self->extra_data_lock()->owned_by_self(), "must have lock");
1875 assert(!Thread::current()->is_Java_thread() ||
1876 JavaThread::current()->is_in_no_safepoint_scope(),
1877 "JavaThread must have NoSafepointVerifier inside lock scope");
1878 }
1879 #endif
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "cds/cdsConfig.hpp"
27 #include "ci/ciMethodData.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "compiler/compilationPolicy.hpp"
30 #include "compiler/compilerDefinitions.inline.hpp"
31 #include "compiler/compilerOracle.hpp"
32 #include "interpreter/bytecode.hpp"
33 #include "interpreter/bytecodeStream.hpp"
34 #include "interpreter/linkResolver.hpp"
35 #include "memory/metaspaceClosure.hpp"
36 #include "memory/resourceArea.hpp"
37 #include "oops/klass.inline.hpp"
38 #include "oops/methodData.inline.hpp"
39 #include "prims/jvmtiRedefineClasses.hpp"
40 #include "runtime/atomic.hpp"
41 #include "runtime/deoptimization.hpp"
42 #include "runtime/handles.inline.hpp"
43 #include "runtime/orderAccess.hpp"
44 #include "runtime/safepointVerifiers.hpp"
45 #include "runtime/signature.hpp"
46 #include "utilities/align.hpp"
302 #ifdef ASSERT
303 ResourceMark rm;
304 ReferenceArgumentCount rac(inv.signature());
305 int count = MIN2(rac.count(), (int)TypeProfileArgsLimit);
306 assert(count > 0, "room for args type but none found?");
307 check_number_of_arguments(count);
308 #endif
309 _args.post_initialize(inv.signature(), inv.has_receiver(), false);
310 }
311
312 if (has_return()) {
313 assert(is_reference_type(inv.result_type()), "room for a ret type but doesn't return obj?");
314 _ret.post_initialize();
315 }
316 }
317
318 void TypeStackSlotEntries::clean_weak_klass_links(bool always_clean) {
319 for (int i = 0; i < _number_of_entries; i++) {
320 intptr_t p = type(i);
321 Klass* k = (Klass*)klass_part(p);
322 if (k != nullptr) {
323 if (!always_clean && k->is_instance_klass() && InstanceKlass::cast(k)->is_not_initialized()) {
324 continue; // skip not-yet-initialized classes // TODO: maybe clear the slot instead?
325 }
326 if (always_clean || !k->is_loader_alive()) {
327 set_type(i, with_status((Klass*)nullptr, p));
328 }
329 }
330 }
331 }
332
333 void TypeStackSlotEntries::metaspace_pointers_do(MetaspaceClosure* it) {
334 for (int i = 0; i < _number_of_entries; i++) {
335 set_type(i, klass_part(type(i))); // reset tag; FIXME: properly handle tagged pointers
336 Klass** k = (Klass**)type_adr(i);
337 it->push(k);
338 // it->push_tagged(k);
339 }
340 }
341
342 void ReturnTypeEntry::clean_weak_klass_links(bool always_clean) {
343 intptr_t p = type();
344 Klass* k = (Klass*)klass_part(p);
345 if (k != nullptr) {
346 if (!always_clean && k->is_instance_klass() && InstanceKlass::cast(k)->is_not_initialized()) {
347 return; // skip not-yet-initialized classes // TODO: maybe clear the slot instead?
348 }
349 if (always_clean || !k->is_loader_alive()) {
350 set_type(with_status((Klass*)nullptr, p));
351 }
352 }
353 }
354
355 void ReturnTypeEntry::metaspace_pointers_do(MetaspaceClosure* it) {
356 Klass** k = (Klass**)type_adr(); // tagged
357 set_type(klass_part(type())); // reset tag; FIXME: properly handle tagged pointers
358 it->push(k);
359 // it->push_tagged(k);
360 }
361
362 bool TypeEntriesAtCall::return_profiling_enabled() {
363 return MethodData::profile_return();
364 }
365
366 bool TypeEntriesAtCall::arguments_profiling_enabled() {
367 return MethodData::profile_arguments();
368 }
369
370 void TypeEntries::print_klass(outputStream* st, intptr_t k) {
371 if (is_type_none(k)) {
372 st->print("none");
373 } else if (is_type_unknown(k)) {
374 st->print("unknown");
375 } else {
376 valid_klass(k)->print_value_on(st);
377 }
378 if (was_null_seen(k)) {
379 st->print(" (null seen)");
380 }
381 }
417 _args.print_data_on(st);
418 }
419 if (has_return()) {
420 tab(st, true);
421 st->print("return type");
422 _ret.print_data_on(st);
423 }
424 }
425
426 // ==================================================================
427 // ReceiverTypeData
428 //
429 // A ReceiverTypeData is used to access profiling information about a
430 // dynamic type check. It consists of a counter that records the total number
431 // of times the check is reached, and a series of (Klass*, count) pairs
432 // that store a type profile for the receiver of the check.
433
434 void ReceiverTypeData::clean_weak_klass_links(bool always_clean) {
435 for (uint row = 0; row < row_limit(); row++) {
436 Klass* p = receiver(row);
437 if (p != nullptr) {
438 if (!always_clean && p->is_instance_klass() && InstanceKlass::cast(p)->is_not_initialized()) {
439 continue; // skip not-yet-initialized classes // TODO: maybe clear the slot instead?
440 }
441 if (always_clean || !p->is_loader_alive()) {
442 clear_row(row);
443 }
444 }
445 }
446 }
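// Sketch (hypothetical helper): the number of distinct receivers recorded in
// the rows is what call-site heuristics usually look at first, e.g. to decide
// whether a virtual call is effectively monomorphic:
//
//   bool is_effectively_monomorphic(const ReceiverTypeData* data) {
//     int observed = 0;
//     for (uint row = 0; row < data->row_limit(); row++) {
//       if (data->receiver(row) != nullptr) {
//         observed++;
//       }
//     }
//     // Ignores the polymorphic-miss counter, count(), for simplicity.
//     return observed == 1;
//   }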
447
448 void ReceiverTypeData::metaspace_pointers_do(MetaspaceClosure *it) {
449 for (uint row = 0; row < row_limit(); row++) {
450 Klass** recv = (Klass**)intptr_at_adr(receiver_cell_index(row));
451 it->push(recv);
452 }
453 }
454
455 void ReceiverTypeData::print_receiver_data_on(outputStream* st) const {
456 uint row;
457 int entries = 0;
458 for (row = 0; row < row_limit(); row++) {
459 if (receiver(row) != nullptr) entries++;
460 }
461 st->print_cr("count(%u) entries(%u)", count(), entries);
462 int total = count();
463 for (row = 0; row < row_limit(); row++) {
464 if (receiver(row) != nullptr) {
465 total += receiver_count(row);
466 }
467 }
468 for (row = 0; row < row_limit(); row++) {
469 if (receiver(row) != nullptr) {
470 tab(st);
471 receiver(row)->print_value_on(st);
472 st->print_cr("(%u %4.2f)", receiver_count(row), (float) receiver_count(row) / (float) total);
473 }
474 }
663 return obj_args + 1; // 1 cell for array len
664 }
665 return 0;
666 }
667
668 void ParametersTypeData::post_initialize(BytecodeStream* stream, MethodData* mdo) {
669 _parameters.post_initialize(mdo->method()->signature(), !mdo->method()->is_static(), true);
670 }
671
672 bool ParametersTypeData::profiling_enabled() {
673 return MethodData::profile_parameters();
674 }
675
676 void ParametersTypeData::print_data_on(outputStream* st, const char* extra) const {
677 print_shared(st, "ParametersTypeData", extra);
678 tab(st);
679 _parameters.print_data_on(st);
680 st->cr();
681 }
682
683 void SpeculativeTrapData::metaspace_pointers_do(MetaspaceClosure* it) {
684 Method** m = (Method**)intptr_at_adr(speculative_trap_method);
685 it->push(m);
686 }
687
688 void SpeculativeTrapData::print_data_on(outputStream* st, const char* extra) const {
689 print_shared(st, "SpeculativeTrapData", extra);
690 tab(st);
691 method()->print_short_name(st);
692 st->cr();
693 }
694
695 // ==================================================================
696 // MethodData*
697 //
698 // A MethodData* holds information which has been collected about
699 // a method.
700
701 MethodData* MethodData::allocate(ClassLoaderData* loader_data, const methodHandle& method, TRAPS) {
702 assert(!THREAD->owns_locks(), "Should not own any locks");
703 int size = MethodData::compute_allocation_size_in_words(method);
704
705 return new (loader_data, size, MetaspaceObj::MethodDataType, THREAD)
706 MethodData(method);
707 }
1245
1246 // Give each of the data entries a chance to perform specific
1247 // data initialization.
1248 void MethodData::post_initialize(BytecodeStream* stream) {
1249 ResourceMark rm;
1250 ProfileData* data;
1251 for (data = first_data(); is_valid(data); data = next_data(data)) {
1252 stream->set_start(data->bci());
1253 stream->next();
1254 data->post_initialize(stream, this);
1255 }
1256 if (_parameters_type_data_di != no_parameters) {
1257 parameters_type_data()->post_initialize(nullptr, this);
1258 }
1259 }
1260
1261 // Initialize the MethodData* corresponding to a given method.
1262 MethodData::MethodData(const methodHandle& method)
1263 : _method(method()),
1264 // Holds Compile_lock
1265 _compiler_counters(),
1266 _parameters_type_data_di(parameters_uninitialized) {
1267 _extra_data_lock = nullptr;
1268 initialize();
1269 }
1270
1271 MethodData::MethodData() {
1272 assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
1273 }
1274
1275 void MethodData::initialize() {
1276 Thread* thread = Thread::current();
1277 NoSafepointVerifier no_safepoint; // init function atomic wrt GC
1278 ResourceMark rm(thread);
1279
1280 init();
1281 set_creation_mileage(mileage_of(method()));
1282
1283 // Go through the bytecodes and allocate and initialize the
1284 // corresponding data cells.
1285 int data_size = 0;
1286 int empty_bc_count = 0; // number of bytecodes lacking data
1287 _data[0] = 0; // apparently not set below.
1288 BytecodeStream stream(methodHandle(thread, method()));
1289 Bytecodes::Code c;
1290 bool needs_speculative_traps = false;
1291 while ((c = stream.next()) >= 0) {
1292 int size_in_bytes = initialize_data(&stream, data_size);
1374 _tenure_traps = 0;
1375 _num_loops = 0;
1376 _num_blocks = 0;
1377 _would_profile = unknown;
1378
1379 #if INCLUDE_JVMCI
1380 _jvmci_ir_size = 0;
1381 _failed_speculations = nullptr;
1382 #endif
1383
1384 // Initialize escape flags.
1385 clear_escape_info();
1386 }
1387
1388 // Get a measure of how much mileage the method has on it.
1389 int MethodData::mileage_of(Method* method) {
1390 return MAX2(method->invocation_count(), method->backedge_count());
1391 }
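// For example, a method with invocation_count() == 600 and
// backedge_count() == 4500 (a hot loop, few calls) has a mileage of 4500.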
1392
1393 bool MethodData::is_mature() const {
1394 return CompilationPolicy::is_mature((MethodData*)this);
1395 }
1396
1397 // Translate a bci to its corresponding data index (di).
1398 address MethodData::bci_to_dp(int bci) {
1399 ResourceMark rm;
1400 DataLayout* data = data_layout_before(bci);
1401 DataLayout* prev = nullptr;
1402 for ( ; is_valid(data); data = next_data_layout(data)) {
1403 if (data->bci() >= bci) {
1404 if (data->bci() == bci) set_hint_di(dp_to_di((address)data));
1405 else if (prev != nullptr) set_hint_di(dp_to_di((address)prev));
1406 return (address)data;
1407 }
1408 prev = data;
1409 }
1410 return (address)limit_data_position();
1411 }
1412
1413 // Translate a bci to its corresponding data, or null.
1414 ProfileData* MethodData::bci_to_data(int bci) {
1732 }
1733
1734 bool MethodData::profile_all_parameters() {
1735 return profile_parameters_flag() == type_profile_all;
1736 }
1737
1738 bool MethodData::profile_parameters_for_method(const methodHandle& m) {
1739 if (!profile_parameters()) {
1740 return false;
1741 }
1742
1743 if (profile_all_parameters()) {
1744 return true;
1745 }
1746
1747 assert(profile_parameters_jsr292_only(), "inconsistent");
1748 return m->is_compiled_lambda_form();
1749 }
1750
1751 void MethodData::metaspace_pointers_do(MetaspaceClosure* it) {
1752 log_trace(cds)("Iter(MethodData): %p for %p %s", this, _method, _method->name_and_sig_as_C_string());
1753 it->push(&_method);
1754 if (_parameters_type_data_di != no_parameters) {
1755 parameters_type_data()->metaspace_pointers_do(it);
1756 }
1757 for (ProfileData* data = first_data(); is_valid(data); data = next_data(data)) {
1758 data->metaspace_pointers_do(it);
1759 }
1760 for (DataLayout* dp = extra_data_base();
1761 dp < extra_data_limit();
1762 dp = MethodData::next_extra(dp)) {
1763 if (dp->tag() == DataLayout::speculative_trap_data_tag) {
1764 ResourceMark rm;
1765 SpeculativeTrapData* data = new SpeculativeTrapData(dp);
1766 data->metaspace_pointers_do(it);
1767 } else if (dp->tag() == DataLayout::no_tag ||
1768 dp->tag() == DataLayout::arg_info_data_tag) {
1769 break;
1770 }
1771 }
1772 }
1773
1774 void MethodData::clean_extra_data_helper(DataLayout* dp, int shift, bool reset) {
1775 check_extra_data_locked();
1776
1777 if (shift == 0) {
1778 return;
1779 }
1780 if (!reset) {
1781 // Move all cells of trap entry at dp left by "shift" cells
1782 intptr_t* start = (intptr_t*)dp;
1783 intptr_t* end = (intptr_t*)next_extra(dp);
1784 for (intptr_t* ptr = start; ptr < end; ptr++) {
1785 *(ptr-shift) = *ptr;
1786 }
1787 } else {
1788 // Reset "shift" cells stopping at dp
1789 intptr_t* start = ((intptr_t*)dp) - shift;
1790 intptr_t* end = (intptr_t*)dp;
1791 for (intptr_t* ptr = start; ptr < end; ptr++) {
1792 *ptr = 0;
1793 }
1794 }
1795 }
1796
1797 // Check for entries that reference an unloaded method
1798 class CleanExtraDataKlassClosure : public CleanExtraDataClosure {
1799 bool _always_clean;
1800 public:
1801 CleanExtraDataKlassClosure(bool always_clean) : _always_clean(always_clean) {}
1802 bool is_live(Method* m) {
1803 if (!_always_clean && m->method_holder()->is_instance_klass() && InstanceKlass::cast(m->method_holder())->is_not_initialized()) {
1804 return true; // TODO: treat as unloaded instead?
1805 }
1806 return !(_always_clean) && m->method_holder()->is_loader_alive();
1807 }
1808 };
1809
1810 // Check for entries that reference a redefined method
1811 class CleanExtraDataMethodClosure : public CleanExtraDataClosure {
1812 public:
1813 CleanExtraDataMethodClosure() {}
1814 bool is_live(Method* m) { return !m->is_old(); }
1815 };
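// Usage sketch: both closures feed the same cleanup path, taken with the extra
// data lock held (mirroring the locked cleanup sequence further below in this
// file):
//
//   CleanExtraDataMethodClosure cl;
//   MutexLocker ml(extra_data_lock(), Mutex::_no_safepoint_check_flag);
//   clean_extra_data(&cl);
//   verify_extra_data_clean(&cl);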
1816
1817 Mutex* MethodData::extra_data_lock() {
1818 Mutex* lock = Atomic::load(&_extra_data_lock);
1819 if (lock == nullptr) {
1820 lock = new Mutex(Mutex::nosafepoint, "MDOExtraData_lock");
1821 Mutex* old = Atomic::cmpxchg(&_extra_data_lock, (Mutex*)nullptr, lock);
1822 if (old != nullptr) {
1823 // Another thread created the lock before us. Use that lock instead.
1824 delete lock;
1825 return old;
1826 }
1827 }
1828 return lock;
1829 }
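// The same allocate/publish-or-adopt pattern, reduced to its essentials
// (sketch; the template and names below are illustrative, not HotSpot API):
//
//   template <typename T>
//   T* lazy_init(T* volatile* slot) {
//     T* cur = Atomic::load(slot);
//     if (cur == nullptr) {
//       T* fresh = new T();
//       T* prev = Atomic::cmpxchg(slot, (T*)nullptr, fresh);  // publish if still null
//       if (prev != nullptr) {
//         delete fresh;   // another thread won the race; adopt its object
//         return prev;
//       }
//       cur = fresh;
//     }
//     return cur;
//   }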
1830
1831 // Remove SpeculativeTrapData entries that reference an unloaded or
1832 // redefined method
1833 void MethodData::clean_extra_data(CleanExtraDataClosure* cl) {
1834 check_extra_data_locked();
1835
1836 DataLayout* dp = extra_data_base();
1837 DataLayout* end = args_data_limit();
1838
1839 int shift = 0;
1840 for (; dp < end; dp = next_extra(dp)) {
1841 switch(dp->tag()) {
1842 case DataLayout::speculative_trap_data_tag: {
1843 SpeculativeTrapData* data = new SpeculativeTrapData(dp);
1844 Method* m = data->method();
1845 assert(m != nullptr, "should have a method");
1846 if (!cl->is_live(m)) {
1847 // "shift" accumulates the number of cells for dead
1848 // SpeculativeTrapData entries that have been seen so
1849 // far. Following entries must be shifted left by that many
1930 ResourceMark rm;
1931 CleanExtraDataMethodClosure cl;
1932
1933 // Lock to modify extra data, and prevent Safepoint from breaking the lock
1934 MutexLocker ml(extra_data_lock(), Mutex::_no_safepoint_check_flag);
1935
1936 clean_extra_data(&cl);
1937 verify_extra_data_clean(&cl);
1938 }
1939
1940 void MethodData::deallocate_contents(ClassLoaderData* loader_data) {
1941 release_C_heap_structures();
1942 }
1943
1944 void MethodData::release_C_heap_structures() {
1945 #if INCLUDE_JVMCI
1946 FailedSpeculation::free_failed_speculations(get_failed_speculations_address());
1947 #endif
1948 }
1949
1950 #if INCLUDE_CDS
1951 void MethodData::remove_unshareable_info() {
1952 _extra_data_lock = nullptr;
1953 }
1954
1955 void MethodData::restore_unshareable_info(TRAPS) {
1956 //_extra_data_lock = new Mutex(Mutex::nosafepoint, "MDOExtraData_lock");
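  // The lock is recreated lazily by extra_data_lock() on first use after the
  // archive has been mapped, so nothing is rebuilt here.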
1957 }
1958 #endif // INCLUDE_CDS
1959
1960 #ifdef ASSERT
1961 void MethodData::check_extra_data_locked() const {
1962 // Cast away const just to be able to verify the lock.
1963 // Usually we only want non-const access to the lock,
1964 // so this is an exception.
1965 MethodData* self = (MethodData*)this;
1966 assert(self->extra_data_lock()->owned_by_self() || CDSConfig::is_dumping_archive(), "must have lock");
1967 assert(!Thread::current()->is_Java_thread() ||
1968 JavaThread::current()->is_in_no_safepoint_scope(),
1969 "JavaThread must have NoSafepointVerifier inside lock scope");
1970 }
1971 #endif