2207
2208 __ load_method_entry(cache, index);
2209 switch(byte_no) {
2210 case f1_byte:
2211 __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedMethodEntry::bytecode1_offset())));
2212 break;
2213 case f2_byte:
2214 __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedMethodEntry::bytecode2_offset())));
2215 break;
2216 default:
2217 ShouldNotReachHere();
2218 }
2219 __ cmpl(temp, code); // have we resolved this bytecode?
2220 __ jcc(Assembler::equal, resolved);
2221
2222 // resolve first time through
2223 // Class initialization barrier slow path lands here as well.
2224 __ bind(L_clinit_barrier_slow);
2225 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2226 __ movl(temp, code);
2227 __ call_VM(noreg, entry, temp);
2228 // Update registers with resolved info
2229 __ load_method_entry(cache, index);
2230
2231 __ bind(resolved);
2232
2233 // Class initialization barrier for static methods
2234 if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
2235 const Register method = temp;
2236 const Register klass = temp;
2237
2238 __ movptr(method, Address(cache, in_bytes(ResolvedMethodEntry::method_offset())));
2239 __ load_method_holder(klass, method);
2240 __ clinit_barrier(klass, nullptr /*L_fast_path*/, &L_clinit_barrier_slow);
2241 }
2242 }
2243
2244 void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
2245 Register cache,
2246 Register index) {
2247 const Register temp = rbx;
2252 Bytecodes::Code code = bytecode();
2253 switch (code) {
2254 case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2255 case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2256 default: break;
2257 }
2258
2259 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2260 __ load_field_entry(cache, index);
2261 if (byte_no == f1_byte) {
2262 __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedFieldEntry::get_code_offset())));
2263 } else {
2264 __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedFieldEntry::put_code_offset())));
2265 }
2266 __ cmpl(temp, code); // have we resolved this bytecode?
2267 __ jcc(Assembler::equal, resolved);
2268
2269 // resolve first time through
2270 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2271 __ movl(temp, code);
2272 __ call_VM(noreg, entry, temp);
2273 // Update registers with resolved info
2274 __ load_field_entry(cache, index);
2275
2276 __ bind(resolved);
2277 }
2278
2279 void TemplateTable::load_resolved_field_entry(Register obj,
2280 Register cache,
2281 Register tos_state,
2282 Register offset,
2283 Register flags,
2284 bool is_static = false) {
2285 assert_different_registers(cache, tos_state, flags, offset);
2286
2287 // Field offset
2288 __ load_sized_value(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
2289
2290 // Flags
2291 __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
2292
2309 const Register cache = rcx;
2310 const Register index = rdx;
2311 assert_different_registers(method, appendix, cache, index);
2312
2313 __ save_bcp();
2314
2315 Label resolved;
2316
2317 __ load_resolved_indy_entry(cache, index);
2318 __ movptr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
2319
2320 // Compare the method to zero
2321 __ testptr(method, method);
2322 __ jcc(Assembler::notZero, resolved);
2323
2324 Bytecodes::Code code = bytecode();
2325
2326 // Call to the interpreter runtime to resolve invokedynamic
2327 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2328 __ movl(method, code); // this is essentially Bytecodes::_invokedynamic
2329 __ call_VM(noreg, entry, method);
2330 // Update registers with resolved info
2331 __ load_resolved_indy_entry(cache, index);
2332 __ movptr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
2333
2334 #ifdef ASSERT
2335 __ testptr(method, method);
2336 __ jcc(Assembler::notZero, resolved);
2337 __ stop("Should be resolved by now");
2338 #endif // ASSERT
2339 __ bind(resolved);
2340
2341 Label L_no_push;
2342 // Check if there is an appendix
2343 __ load_unsigned_byte(index, Address(cache, in_bytes(ResolvedIndyEntry::flags_offset())));
2344 __ testl(index, (1 << ResolvedIndyEntry::has_appendix_shift));
2345 __ jcc(Assembler::zero, L_no_push);
2346
2347 // Get appendix
2348 __ load_unsigned_short(index, Address(cache, in_bytes(ResolvedIndyEntry::resolved_references_index_offset())));
2349 // Push the appendix as a trailing parameter
3617 }
3618
3619 if (DTraceAllocProbes) {
3620 // Trigger dtrace event for fastpath
3621 __ push(atos);
3622 __ call_VM_leaf(
3623 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
3624 __ pop(atos);
3625 }
3626
3627 __ jmp(done);
3628 }
3629
3630 // slow case
3631 __ bind(slow_case);
3632 __ pop(rcx); // restore stack pointer to what it was when we came in.
3633 __ bind(slow_case_no_pop);
3634
3635 __ get_constant_pool(c_rarg1);
3636 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3637 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3638 __ verify_oop(rax);
3639
3640 // continue
3641 __ bind(done);
3642 }
3643
3644 void TemplateTable::newarray() {
3645 transition(itos, atos);
3646 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3647 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3648 c_rarg1, rax);
3649 }
3650
3651 void TemplateTable::anewarray() {
3652 transition(itos, atos);
3653
3654 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3655 __ get_constant_pool(c_rarg1);
3656 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3657 c_rarg1, c_rarg2, rax);
3658 }
2207
2208 __ load_method_entry(cache, index);
2209 switch(byte_no) {
2210 case f1_byte:
2211 __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedMethodEntry::bytecode1_offset())));
2212 break;
2213 case f2_byte:
2214 __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedMethodEntry::bytecode2_offset())));
2215 break;
2216 default:
2217 ShouldNotReachHere();
2218 }
2219 __ cmpl(temp, code); // have we resolved this bytecode?
2220 __ jcc(Assembler::equal, resolved);
2221
2222 // resolve first time through
2223 // Class initialization barrier slow path lands here as well.
2224 __ bind(L_clinit_barrier_slow);
2225 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2226 __ movl(temp, code);
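     // Annotation (assumption): resolution may block on class loading/initialization, so the VM call goes through call_VM_preemptable, which appears intended to let the thread be preempted while it waits.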
2227 __ call_VM_preemptable(noreg, entry, temp);
2228 // Update registers with resolved info
2229 __ load_method_entry(cache, index);
2230
2231 __ bind(resolved);
2232
2233 // Class initialization barrier for static methods
2234 if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
2235 const Register method = temp;
2236 const Register klass = temp;
2237
2238 __ movptr(method, Address(cache, in_bytes(ResolvedMethodEntry::method_offset())));
2239 __ load_method_holder(klass, method);
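     // clinit_barrier branches back to L_clinit_barrier_slow when the method holder is not yet fully initialized, so resolution is redone and class initialization is triggered (or waited for).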
2240 __ clinit_barrier(klass, nullptr /*L_fast_path*/, &L_clinit_barrier_slow);
2241 }
2242 }
2243
2244 void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
2245 Register cache,
2246 Register index) {
2247 const Register temp = rbx;
2252 Bytecodes::Code code = bytecode();
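     // The _nofast variants resolve to the same field entries as the ordinary bytecodes, so map them to _getfield/_putfield before comparing against the cached resolved code.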
2253 switch (code) {
2254 case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2255 case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2256 default: break;
2257 }
2258
2259 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2260 __ load_field_entry(cache, index);
2261 if (byte_no == f1_byte) {
2262 __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedFieldEntry::get_code_offset())));
2263 } else {
2264 __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedFieldEntry::put_code_offset())));
2265 }
2266 __ cmpl(temp, code); // have we resolved this bytecode?
2267 __ jcc(Assembler::equal, resolved);
2268
2269 // resolve first time through
2270 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2271 __ movl(temp, code);
2272 __ call_VM_preemptable(noreg, entry, temp);
2273 // Update registers with resolved info
2274 __ load_field_entry(cache, index);
2275
2276 __ bind(resolved);
2277 }
2278
2279 void TemplateTable::load_resolved_field_entry(Register obj,
2280 Register cache,
2281 Register tos_state,
2282 Register offset,
2283 Register flags,
2284 bool is_static = false) {
2285 assert_different_registers(cache, tos_state, flags, offset);
2286
2287 // Field offset
2288 __ load_sized_value(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
2289
2290 // Flags
2291 __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
2292
2309 const Register cache = rcx;
2310 const Register index = rdx;
2311 assert_different_registers(method, appendix, cache, index);
2312
2313 __ save_bcp();
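     // Save the bytecode pointer into the interpreter frame so the current bytecode position is visible to the runtime call below.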
2314
2315 Label resolved;
2316
2317 __ load_resolved_indy_entry(cache, index);
2318 __ movptr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
2319
2320 // Compare the method to zero
2321 __ testptr(method, method);
2322 __ jcc(Assembler::notZero, resolved);
2323
2324 Bytecodes::Code code = bytecode();
2325
2326 // Call to the interpreter runtime to resolve invokedynamic
2327 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2328 __ movl(method, code); // this is essentially Bytecodes::_invokedynamic
2329 __ call_VM_preemptable(noreg, entry, method);
2330 // Update registers with resolved info
2331 __ load_resolved_indy_entry(cache, index);
2332 __ movptr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
2333
2334 #ifdef ASSERT
2335 __ testptr(method, method);
2336 __ jcc(Assembler::notZero, resolved);
2337 __ stop("Should be resolved by now");
2338 #endif // ASSERT
2339 __ bind(resolved);
2340
2341 Label L_no_push;
2342 // Check if there is an appendix
2343 __ load_unsigned_byte(index, Address(cache, in_bytes(ResolvedIndyEntry::flags_offset())));
2344 __ testl(index, (1 << ResolvedIndyEntry::has_appendix_shift));
2345 __ jcc(Assembler::zero, L_no_push);
2346
2347 // Get appendix
2348 __ load_unsigned_short(index, Address(cache, in_bytes(ResolvedIndyEntry::resolved_references_index_offset())));
2349 // Push the appendix as a trailing parameter
3617 }
3618
3619 if (DTraceAllocProbes) {
3620 // Trigger dtrace event for fastpath
3621 __ push(atos);
3622 __ call_VM_leaf(
3623 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
3624 __ pop(atos);
3625 }
3626
3627 __ jmp(done);
3628 }
3629
3630 // slow case
3631 __ bind(slow_case);
3632 __ pop(rcx); // restore stack pointer to what it was when we came in.
3633 __ bind(slow_case_no_pop);
3634
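     // Slow path: call the runtime with the constant pool and the 2-byte cp index of the class; the newly allocated oop is returned in rax.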
3635 __ get_constant_pool(c_rarg1);
3636 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3637 __ call_VM_preemptable(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3638 __ verify_oop(rax);
3639
3640 // continue
3641 __ bind(done);
3642 }
3643
3644 void TemplateTable::newarray() {
3645 transition(itos, atos);
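     // rax (itos) holds the element count; the unsigned byte at bcp + 1 is the primitive element type tag passed to the runtime.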
3646 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3647 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3648 c_rarg1, rax);
3649 }
3650
3651 void TemplateTable::anewarray() {
3652 transition(itos, atos);
3653
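     // rax (itos) holds the element count; the 2-byte operand at bcp + 1 is the constant-pool index of the element class.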
3654 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3655 __ get_constant_pool(c_rarg1);
3656 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3657 c_rarg1, c_rarg2, rax);
3658 }