1 /* 2 * Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 */ 23 24 // no precompiled headers 25 #include "ci/ciUtilities.hpp" 26 #include "compiler/compiler_globals.hpp" 27 #include "compiler/oopMap.hpp" 28 #include "gc/shared/barrierSet.hpp" 29 #include "gc/shared/barrierSetAssembler.hpp" 30 #include "gc/shared/barrierSetNMethod.hpp" 31 #include "gc/shared/cardTable.hpp" 32 #include "gc/shared/collectedHeap.hpp" 33 #include "gc/shared/gc_globals.hpp" 34 #include "gc/shared/tlab_globals.hpp" 35 #if INCLUDE_ZGC 36 #include "gc/x/xBarrierSetRuntime.hpp" 37 #include "gc/x/xThreadLocalData.hpp" 38 #endif 39 #include "jvmci/jvmciCompilerToVM.hpp" 40 #include "jvmci/jvmciEnv.hpp" 41 #include "jvmci/vmStructs_jvmci.hpp" 42 #include "memory/universe.hpp" 43 #include "oops/compressedOops.hpp" 44 #include "oops/klass.inline.hpp" 45 #include "runtime/flags/jvmFlag.hpp" 46 #include "runtime/sharedRuntime.hpp" 47 #include "runtime/stubRoutines.hpp" 48 #include "utilities/resourceHash.hpp" 49 50 51 int CompilerToVM::Data::Klass_vtable_start_offset; 52 int CompilerToVM::Data::Klass_vtable_length_offset; 53 54 int CompilerToVM::Data::Method_extra_stack_entries; 55 56 address CompilerToVM::Data::SharedRuntime_ic_miss_stub; 57 address CompilerToVM::Data::SharedRuntime_handle_wrong_method_stub; 58 address CompilerToVM::Data::SharedRuntime_deopt_blob_unpack; 59 address CompilerToVM::Data::SharedRuntime_deopt_blob_unpack_with_exception_in_tls; 60 address CompilerToVM::Data::SharedRuntime_deopt_blob_uncommon_trap; 61 address CompilerToVM::Data::SharedRuntime_polling_page_return_handler; 62 63 address CompilerToVM::Data::nmethod_entry_barrier; 64 int CompilerToVM::Data::thread_disarmed_guard_value_offset; 65 int CompilerToVM::Data::thread_address_bad_mask_offset; 66 67 address CompilerToVM::Data::ZBarrierSetRuntime_load_barrier_on_oop_field_preloaded; 68 address CompilerToVM::Data::ZBarrierSetRuntime_load_barrier_on_weak_oop_field_preloaded; 69 address CompilerToVM::Data::ZBarrierSetRuntime_load_barrier_on_phantom_oop_field_preloaded; 70 address CompilerToVM::Data::ZBarrierSetRuntime_weak_load_barrier_on_oop_field_preloaded; 71 address CompilerToVM::Data::ZBarrierSetRuntime_weak_load_barrier_on_weak_oop_field_preloaded; 72 address CompilerToVM::Data::ZBarrierSetRuntime_weak_load_barrier_on_phantom_oop_field_preloaded; 73 address CompilerToVM::Data::ZBarrierSetRuntime_load_barrier_on_oop_array; 74 address CompilerToVM::Data::ZBarrierSetRuntime_clone; 75 76 bool CompilerToVM::Data::continuations_enabled; 77 78 #ifdef AARCH64 79 int CompilerToVM::Data::BarrierSetAssembler_nmethod_patching_type; 80 address CompilerToVM::Data::BarrierSetAssembler_patching_epoch_addr; 81 #endif 82 83 size_t CompilerToVM::Data::ThreadLocalAllocBuffer_alignment_reserve; 84 85 CollectedHeap* CompilerToVM::Data::Universe_collectedHeap; 86 int CompilerToVM::Data::Universe_base_vtable_size; 87 address CompilerToVM::Data::Universe_narrow_oop_base; 88 int CompilerToVM::Data::Universe_narrow_oop_shift; 89 address CompilerToVM::Data::Universe_narrow_klass_base; 90 int CompilerToVM::Data::Universe_narrow_klass_shift; 91 void* CompilerToVM::Data::Universe_non_oop_bits; 92 uintptr_t CompilerToVM::Data::Universe_verify_oop_mask; 93 uintptr_t CompilerToVM::Data::Universe_verify_oop_bits; 94 95 bool CompilerToVM::Data::_supports_inline_contig_alloc; 96 HeapWord** CompilerToVM::Data::_heap_end_addr; 97 HeapWord* volatile* CompilerToVM::Data::_heap_top_addr; 98 int CompilerToVM::Data::_max_oop_map_stack_offset; 99 int CompilerToVM::Data::_fields_annotations_base_offset; 100 101 CardTable::CardValue* CompilerToVM::Data::cardtable_start_address; 102 int CompilerToVM::Data::cardtable_shift; 103 104 size_t CompilerToVM::Data::vm_page_size; 105 106 int CompilerToVM::Data::sizeof_vtableEntry = sizeof(vtableEntry); 107 int CompilerToVM::Data::sizeof_ExceptionTableElement = sizeof(ExceptionTableElement); 108 int CompilerToVM::Data::sizeof_LocalVariableTableElement = sizeof(LocalVariableTableElement); 109 int CompilerToVM::Data::sizeof_ConstantPool = sizeof(ConstantPool); 110 int CompilerToVM::Data::sizeof_narrowKlass = sizeof(narrowKlass); 111 int CompilerToVM::Data::sizeof_arrayOopDesc = sizeof(arrayOopDesc); 112 int CompilerToVM::Data::sizeof_BasicLock = sizeof(BasicLock); 113 114 address CompilerToVM::Data::dsin; 115 address CompilerToVM::Data::dcos; 116 address CompilerToVM::Data::dtan; 117 address CompilerToVM::Data::dexp; 118 address CompilerToVM::Data::dlog; 119 address CompilerToVM::Data::dlog10; 120 address CompilerToVM::Data::dpow; 121 122 address CompilerToVM::Data::symbol_init; 123 address CompilerToVM::Data::symbol_clinit; 124 125 int CompilerToVM::Data::data_section_item_alignment; 126 127 void CompilerToVM::Data::initialize(JVMCI_TRAPS) { 128 Klass_vtable_start_offset = in_bytes(Klass::vtable_start_offset()); 129 Klass_vtable_length_offset = in_bytes(Klass::vtable_length_offset()); 130 131 Method_extra_stack_entries = Method::extra_stack_entries(); 132 133 SharedRuntime_ic_miss_stub = SharedRuntime::get_ic_miss_stub(); 134 SharedRuntime_handle_wrong_method_stub = SharedRuntime::get_handle_wrong_method_stub(); 135 SharedRuntime_deopt_blob_unpack = SharedRuntime::deopt_blob()->unpack(); 136 SharedRuntime_deopt_blob_unpack_with_exception_in_tls = SharedRuntime::deopt_blob()->unpack_with_exception_in_tls(); 137 SharedRuntime_deopt_blob_uncommon_trap = SharedRuntime::deopt_blob()->uncommon_trap(); 138 SharedRuntime_polling_page_return_handler = SharedRuntime::polling_page_return_handler_blob()->entry_point(); 139 140 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); 141 if (bs_nm != nullptr) { 142 thread_disarmed_guard_value_offset = in_bytes(bs_nm->thread_disarmed_guard_value_offset()); 143 AMD64_ONLY(nmethod_entry_barrier = StubRoutines::x86::method_entry_barrier()); 144 AARCH64_ONLY(nmethod_entry_barrier = StubRoutines::aarch64::method_entry_barrier()); 145 BarrierSetAssembler* bs_asm = BarrierSet::barrier_set()->barrier_set_assembler(); 146 AARCH64_ONLY(BarrierSetAssembler_nmethod_patching_type = (int) bs_asm->nmethod_patching_type()); 147 AARCH64_ONLY(BarrierSetAssembler_patching_epoch_addr = bs_asm->patching_epoch_addr()); 148 } 149 150 #if INCLUDE_ZGC 151 if (UseZGC) { 152 thread_address_bad_mask_offset = in_bytes(XThreadLocalData::address_bad_mask_offset()); 153 ZBarrierSetRuntime_load_barrier_on_oop_field_preloaded = XBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(); 154 ZBarrierSetRuntime_load_barrier_on_weak_oop_field_preloaded = XBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded_addr(); 155 ZBarrierSetRuntime_load_barrier_on_phantom_oop_field_preloaded = XBarrierSetRuntime::load_barrier_on_phantom_oop_field_preloaded_addr(); 156 ZBarrierSetRuntime_weak_load_barrier_on_oop_field_preloaded = XBarrierSetRuntime::weak_load_barrier_on_oop_field_preloaded_addr(); 157 ZBarrierSetRuntime_weak_load_barrier_on_weak_oop_field_preloaded = XBarrierSetRuntime::weak_load_barrier_on_weak_oop_field_preloaded_addr(); 158 ZBarrierSetRuntime_weak_load_barrier_on_phantom_oop_field_preloaded = XBarrierSetRuntime::weak_load_barrier_on_phantom_oop_field_preloaded_addr(); 159 ZBarrierSetRuntime_load_barrier_on_oop_array = XBarrierSetRuntime::load_barrier_on_oop_array_addr(); 160 ZBarrierSetRuntime_clone = XBarrierSetRuntime::clone_addr(); 161 } 162 #endif 163 164 continuations_enabled = Continuations::enabled(); 165 166 ThreadLocalAllocBuffer_alignment_reserve = ThreadLocalAllocBuffer::alignment_reserve(); 167 168 Universe_collectedHeap = Universe::heap(); 169 Universe_base_vtable_size = Universe::base_vtable_size(); 170 Universe_narrow_oop_base = CompressedOops::base(); 171 Universe_narrow_oop_shift = CompressedOops::shift(); 172 Universe_narrow_klass_base = CompressedKlassPointers::base(); 173 Universe_narrow_klass_shift = CompressedKlassPointers::shift(); 174 Universe_non_oop_bits = Universe::non_oop_word(); 175 Universe_verify_oop_mask = Universe::verify_oop_mask(); 176 Universe_verify_oop_bits = Universe::verify_oop_bits(); 177 178 _supports_inline_contig_alloc = false; 179 _heap_end_addr = (HeapWord**) -1; 180 _heap_top_addr = (HeapWord* volatile*) -1; 181 182 _max_oop_map_stack_offset = (OopMapValue::register_mask - VMRegImpl::stack2reg(0)->value()) * VMRegImpl::stack_slot_size; 183 int max_oop_map_stack_index = _max_oop_map_stack_offset / VMRegImpl::stack_slot_size; 184 assert(OopMapValue::legal_vm_reg_name(VMRegImpl::stack2reg(max_oop_map_stack_index)), "should be valid"); 185 assert(!OopMapValue::legal_vm_reg_name(VMRegImpl::stack2reg(max_oop_map_stack_index + 1)), "should be invalid"); 186 187 symbol_init = (address) vmSymbols::object_initializer_name(); 188 symbol_clinit = (address) vmSymbols::class_initializer_name(); 189 190 _fields_annotations_base_offset = Array<AnnotationArray*>::base_offset_in_bytes(); 191 192 data_section_item_alignment = relocInfo::addr_unit(); 193 194 BarrierSet* bs = BarrierSet::barrier_set(); 195 if (bs->is_a(BarrierSet::CardTableBarrierSet)) { 196 CardTable::CardValue* base = ci_card_table_address(); 197 assert(base != nullptr, "unexpected byte_map_base"); 198 cardtable_start_address = base; 199 cardtable_shift = CardTable::card_shift(); 200 } else { 201 // No card mark barriers 202 cardtable_start_address = 0; 203 cardtable_shift = 0; 204 } 205 206 vm_page_size = os::vm_page_size(); 207 208 #define SET_TRIGFUNC(name) \ 209 if (StubRoutines::name() != nullptr) { \ 210 name = StubRoutines::name(); \ 211 } else { \ 212 name = CAST_FROM_FN_PTR(address, SharedRuntime::name); \ 213 } 214 215 SET_TRIGFUNC(dsin); 216 SET_TRIGFUNC(dcos); 217 SET_TRIGFUNC(dtan); 218 SET_TRIGFUNC(dexp); 219 SET_TRIGFUNC(dlog10); 220 SET_TRIGFUNC(dlog); 221 SET_TRIGFUNC(dpow); 222 223 #undef SET_TRIGFUNC 224 } 225 226 JVMCIObjectArray CompilerToVM::initialize_intrinsics(JVMCI_TRAPS) { 227 int len = vmIntrinsics::number_of_intrinsics() - 1; // Exclude vmIntrinsics::_none, which is 0 228 JVMCIObjectArray vmIntrinsics = JVMCIENV->new_VMIntrinsicMethod_array(len, JVMCI_CHECK_NULL); 229 int index = 0; 230 vmSymbolID kls_sid = vmSymbolID::NO_SID; 231 JVMCIObject kls_str; 232 #define VM_SYMBOL_TO_STRING(s) \ 233 JVMCIENV->create_string(vmSymbols::symbol_at(VM_SYMBOL_ENUM_NAME(s)), JVMCI_CHECK_NULL) 234 #define VM_INTRINSIC_INFO(id, kls, name, sig, ignore_fcode) { \ 235 vmSymbolID sid = VM_SYMBOL_ENUM_NAME(kls); \ 236 if (kls_sid != sid) { \ 237 kls_str = VM_SYMBOL_TO_STRING(kls); \ 238 kls_sid = sid; \ 239 } \ 240 JVMCIObject name_str = VM_SYMBOL_TO_STRING(name); \ 241 JVMCIObject sig_str = VM_SYMBOL_TO_STRING(sig); \ 242 JVMCIObject vmIntrinsicMethod = JVMCIENV->new_VMIntrinsicMethod(kls_str, name_str, sig_str, (jint) vmIntrinsics::id, JVMCI_CHECK_NULL); \ 243 JVMCIENV->put_object_at(vmIntrinsics, index++, vmIntrinsicMethod); \ 244 } 245 246 // VM_INTRINSICS_DO does *not* iterate over vmIntrinsics::_none 247 VM_INTRINSICS_DO(VM_INTRINSIC_INFO, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_ALIAS_IGNORE) 248 #undef VM_SYMBOL_TO_STRING 249 #undef VM_INTRINSIC_INFO 250 251 assert(index == len, "must be"); 252 return vmIntrinsics; 253 } 254 255 #define PREDEFINED_CONFIG_FLAGS(do_bool_flag, do_int_flag, do_intx_flag, do_uintx_flag) \ 256 do_intx_flag(AllocateInstancePrefetchLines) \ 257 do_intx_flag(AllocatePrefetchDistance) \ 258 do_intx_flag(AllocatePrefetchInstr) \ 259 do_intx_flag(AllocatePrefetchLines) \ 260 do_intx_flag(AllocatePrefetchStepSize) \ 261 do_intx_flag(AllocatePrefetchStyle) \ 262 do_intx_flag(BciProfileWidth) \ 263 do_bool_flag(BootstrapJVMCI) \ 264 do_bool_flag(CITime) \ 265 do_bool_flag(CITimeEach) \ 266 do_uintx_flag(CodeCacheSegmentSize) \ 267 do_intx_flag(CodeEntryAlignment) \ 268 do_intx_flag(ContendedPaddingWidth) \ 269 do_bool_flag(DontCompileHugeMethods) \ 270 do_bool_flag(EagerJVMCI) \ 271 do_bool_flag(EnableContended) \ 272 do_bool_flag(FoldStableValues) \ 273 do_bool_flag(ForceUnreachable) \ 274 do_intx_flag(HugeMethodLimit) \ 275 do_bool_flag(Inline) \ 276 do_intx_flag(JVMCICounterSize) \ 277 do_bool_flag(JVMCIPrintProperties) \ 278 do_int_flag(ObjectAlignmentInBytes) \ 279 do_bool_flag(PrintInlining) \ 280 do_bool_flag(ReduceInitialCardMarks) \ 281 do_bool_flag(RestrictContended) \ 282 do_intx_flag(StackReservedPages) \ 283 do_intx_flag(StackShadowPages) \ 284 do_bool_flag(TLABStats) \ 285 do_uintx_flag(TLABWasteIncrement) \ 286 do_intx_flag(TypeProfileWidth) \ 287 do_bool_flag(UseAESIntrinsics) \ 288 X86_ONLY(do_int_flag(UseAVX)) \ 289 do_bool_flag(UseCRC32Intrinsics) \ 290 do_bool_flag(UseAdler32Intrinsics) \ 291 do_bool_flag(UseCompressedClassPointers) \ 292 do_bool_flag(UseCompressedOops) \ 293 X86_ONLY(do_bool_flag(UseCountLeadingZerosInstruction)) \ 294 X86_ONLY(do_bool_flag(UseCountTrailingZerosInstruction)) \ 295 do_bool_flag(UseG1GC) \ 296 do_bool_flag(UseParallelGC) \ 297 do_bool_flag(UseSerialGC) \ 298 do_bool_flag(UseZGC) \ 299 do_bool_flag(UseEpsilonGC) \ 300 COMPILER2_PRESENT(do_bool_flag(UseMontgomeryMultiplyIntrinsic)) \ 301 COMPILER2_PRESENT(do_bool_flag(UseMontgomerySquareIntrinsic)) \ 302 COMPILER2_PRESENT(do_bool_flag(UseMulAddIntrinsic)) \ 303 COMPILER2_PRESENT(do_bool_flag(UseMultiplyToLenIntrinsic)) \ 304 do_bool_flag(UsePopCountInstruction) \ 305 do_bool_flag(UseSHA1Intrinsics) \ 306 do_bool_flag(UseSHA256Intrinsics) \ 307 do_bool_flag(UseSHA512Intrinsics) \ 308 X86_ONLY(do_int_flag(UseSSE)) \ 309 COMPILER2_PRESENT(do_bool_flag(UseSquareToLenIntrinsic)) \ 310 do_bool_flag(UseTLAB) \ 311 do_bool_flag(VerifyOops) \ 312 313 #define BOXED_BOOLEAN(name, value) name = ((jboolean)(value) ? boxedTrue : boxedFalse) 314 #define BOXED_DOUBLE(name, value) do { jvalue p; p.d = (jdouble) (value); name = JVMCIENV->create_box(T_DOUBLE, &p, JVMCI_CHECK_NULL);} while(0) 315 #define BOXED_LONG(name, value) \ 316 do { \ 317 jvalue p; p.j = (jlong) (value); \ 318 JVMCIObject* e = longs.get(p.j); \ 319 if (e == nullptr) { \ 320 JVMCIObject h = JVMCIENV->create_box(T_LONG, &p, JVMCI_CHECK_NULL); \ 321 longs.put(p.j, h); \ 322 name = h; \ 323 } else { \ 324 name = (*e); \ 325 } \ 326 } while (0) 327 328 #define CSTRING_TO_JSTRING(name, value) \ 329 JVMCIObject name; \ 330 do { \ 331 if (value != nullptr) { \ 332 JVMCIObject* e = strings.get(value); \ 333 if (e == nullptr) { \ 334 JVMCIObject h = JVMCIENV->create_string(value, JVMCI_CHECK_NULL); \ 335 strings.put(value, h); \ 336 name = h; \ 337 } else { \ 338 name = (*e); \ 339 } \ 340 } \ 341 } while (0) 342 343 jobjectArray readConfiguration0(JNIEnv *env, JVMCI_TRAPS) { 344 JavaThread* THREAD = JavaThread::current(); // For exception macros. 345 ResourceHashtable<jlong, JVMCIObject> longs; 346 ResourceHashtable<const char*, JVMCIObject, 347 256, AnyObj::RESOURCE_AREA, mtInternal, 348 &CompilerToVM::cstring_hash, &CompilerToVM::cstring_equals> strings; 349 350 jvalue prim; 351 prim.z = true; JVMCIObject boxedTrue = JVMCIENV->create_box(T_BOOLEAN, &prim, JVMCI_CHECK_NULL); 352 prim.z = false; JVMCIObject boxedFalse = JVMCIENV->create_box(T_BOOLEAN, &prim, JVMCI_CHECK_NULL); 353 354 CompilerToVM::Data::initialize(JVMCI_CHECK_NULL); 355 356 JVMCIENV->VMField_initialize(JVMCI_CHECK_NULL); 357 JVMCIENV->VMFlag_initialize(JVMCI_CHECK_NULL); 358 JVMCIENV->VMIntrinsicMethod_initialize(JVMCI_CHECK_NULL); 359 360 int len = JVMCIVMStructs::localHotSpotVMStructs_count(); 361 JVMCIObjectArray vmFields = JVMCIENV->new_VMField_array(len, JVMCI_CHECK_NULL); 362 for (int i = 0; i < len ; i++) { 363 VMStructEntry vmField = JVMCIVMStructs::localHotSpotVMStructs[i]; 364 const size_t name_buf_size = strlen(vmField.typeName) + strlen(vmField.fieldName) + 2 + 1 /* "::" */; 365 char* name_buf = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, name_buf_size); 366 os::snprintf_checked(name_buf, name_buf_size, "%s::%s", vmField.typeName, vmField.fieldName); 367 CSTRING_TO_JSTRING(name, name_buf); 368 CSTRING_TO_JSTRING(type, vmField.typeString); 369 JVMCIObject box; 370 if (vmField.isStatic && vmField.typeString != nullptr) { 371 if (strcmp(vmField.typeString, "bool") == 0) { 372 BOXED_BOOLEAN(box, *(jbyte*) vmField.address); 373 assert(box.is_non_null(), "must have a box"); 374 } else if (strcmp(vmField.typeString, "int") == 0 || 375 strcmp(vmField.typeString, "jint") == 0 || 376 strcmp(vmField.typeString, "uint32_t") == 0) { 377 BOXED_LONG(box, *(jint*) vmField.address); 378 assert(box.is_non_null(), "must have a box"); 379 } else if (strcmp(vmField.typeString, "uint64_t") == 0) { 380 BOXED_LONG(box, *(uint64_t*) vmField.address); 381 assert(box.is_non_null(), "must have a box"); 382 } else if (strcmp(vmField.typeString, "address") == 0 || 383 strcmp(vmField.typeString, "intptr_t") == 0 || 384 strcmp(vmField.typeString, "uintptr_t") == 0 || 385 strcmp(vmField.typeString, "OopHandle") == 0 || 386 strcmp(vmField.typeString, "size_t") == 0 || 387 // All foo* types are addresses. 388 vmField.typeString[strlen(vmField.typeString) - 1] == '*') { 389 BOXED_LONG(box, *((address*) vmField.address)); 390 assert(box.is_non_null(), "must have a box"); 391 } else { 392 JVMCI_ERROR_NULL("VM field %s has unsupported type %s", name_buf, vmField.typeString); 393 } 394 } 395 JVMCIObject vmFieldObj = JVMCIENV->new_VMField(name, type, vmField.offset, (jlong) vmField.address, box, JVMCI_CHECK_NULL); 396 JVMCIENV->put_object_at(vmFields, i, vmFieldObj); 397 } 398 399 int ints_len = JVMCIVMStructs::localHotSpotVMIntConstants_count(); 400 int longs_len = JVMCIVMStructs::localHotSpotVMLongConstants_count(); 401 len = ints_len + longs_len; 402 JVMCIObjectArray vmConstants = JVMCIENV->new_Object_array(len * 2, JVMCI_CHECK_NULL); 403 int insert = 0; 404 for (int i = 0; i < ints_len ; i++) { 405 VMIntConstantEntry c = JVMCIVMStructs::localHotSpotVMIntConstants[i]; 406 CSTRING_TO_JSTRING(name, c.name); 407 JVMCIObject value; 408 BOXED_LONG(value, c.value); 409 JVMCIENV->put_object_at(vmConstants, insert++, name); 410 JVMCIENV->put_object_at(vmConstants, insert++, value); 411 } 412 for (int i = 0; i < longs_len ; i++) { 413 VMLongConstantEntry c = JVMCIVMStructs::localHotSpotVMLongConstants[i]; 414 CSTRING_TO_JSTRING(name, c.name); 415 JVMCIObject value; 416 BOXED_LONG(value, c.value); 417 JVMCIENV->put_object_at(vmConstants, insert++, name); 418 JVMCIENV->put_object_at(vmConstants, insert++, value); 419 } 420 assert(insert == len * 2, "must be"); 421 422 len = JVMCIVMStructs::localHotSpotVMAddresses_count(); 423 JVMCIObjectArray vmAddresses = JVMCIENV->new_Object_array(len * 2, JVMCI_CHECK_NULL); 424 for (int i = 0; i < len ; i++) { 425 VMAddressEntry a = JVMCIVMStructs::localHotSpotVMAddresses[i]; 426 CSTRING_TO_JSTRING(name, a.name); 427 JVMCIObject value; 428 BOXED_LONG(value, a.value); 429 JVMCIENV->put_object_at(vmAddresses, i * 2, name); 430 JVMCIENV->put_object_at(vmAddresses, i * 2 + 1, value); 431 } 432 433 #define COUNT_FLAG(ignore) +1 434 #ifdef ASSERT 435 #define CHECK_FLAG(type, name) { \ 436 const JVMFlag* flag = JVMFlag::find_declared_flag(#name); \ 437 assert(flag != nullptr, "No such flag named " #name); \ 438 assert(flag->is_##type(), "JVMFlag " #name " is not of type " #type); \ 439 } 440 #else 441 #define CHECK_FLAG(type, name) 442 #endif 443 444 #define ADD_FLAG(type, name, convert) { \ 445 CHECK_FLAG(type, name) \ 446 CSTRING_TO_JSTRING(fname, #name); \ 447 CSTRING_TO_JSTRING(ftype, #type); \ 448 convert(value, name); \ 449 JVMCIObject vmFlagObj = JVMCIENV->new_VMFlag(fname, ftype, value, JVMCI_CHECK_NULL); \ 450 JVMCIENV->put_object_at(vmFlags, i++, vmFlagObj); \ 451 } 452 #define ADD_BOOL_FLAG(name) ADD_FLAG(bool, name, BOXED_BOOLEAN) 453 #define ADD_INT_FLAG(name) ADD_FLAG(int, name, BOXED_LONG) 454 #define ADD_INTX_FLAG(name) ADD_FLAG(intx, name, BOXED_LONG) 455 #define ADD_UINTX_FLAG(name) ADD_FLAG(uintx, name, BOXED_LONG) 456 457 len = 0 + PREDEFINED_CONFIG_FLAGS(COUNT_FLAG, COUNT_FLAG, COUNT_FLAG, COUNT_FLAG); 458 JVMCIObjectArray vmFlags = JVMCIENV->new_VMFlag_array(len, JVMCI_CHECK_NULL); 459 int i = 0; 460 JVMCIObject value; 461 PREDEFINED_CONFIG_FLAGS(ADD_BOOL_FLAG, ADD_INT_FLAG, ADD_INTX_FLAG, ADD_UINTX_FLAG) 462 463 JVMCIObjectArray vmIntrinsics = CompilerToVM::initialize_intrinsics(JVMCI_CHECK_NULL); 464 465 JVMCIObjectArray data = JVMCIENV->new_Object_array(5, JVMCI_CHECK_NULL); 466 JVMCIENV->put_object_at(data, 0, vmFields); 467 JVMCIENV->put_object_at(data, 1, vmConstants); 468 JVMCIENV->put_object_at(data, 2, vmAddresses); 469 JVMCIENV->put_object_at(data, 3, vmFlags); 470 JVMCIENV->put_object_at(data, 4, vmIntrinsics); 471 472 return JVMCIENV->get_jobjectArray(data); 473 }