  __ load_method_entry(cache, index);
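  // bytecode1/bytecode2 in the ResolvedMethodEntry record which bytecode(s)
  // have already been resolved against this constant pool entry.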
  switch (byte_no) {
  case f1_byte:
    __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedMethodEntry::bytecode1_offset())));
    break;
  case f2_byte:
    __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedMethodEntry::bytecode2_offset())));
    break;
  default:
    ShouldNotReachHere();
  }
  __ cmpl(temp, code); // have we resolved this bytecode?
  __ jcc(Assembler::equal, resolved);

  // resolve first time through
  // Class initialization barrier slow path lands here as well.
  __ bind(L_clinit_barrier_slow);
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ movl(temp, code);
  __ call_VM(noreg, entry, temp);
  // Update registers with resolved info
  __ load_method_entry(cache, index);

  __ bind(resolved);

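  // For invokestatic the holder class must be initialized before the call:
  // clinit_barrier falls through if the class is fully initialized (or is being
  // initialized by the current thread), and otherwise branches back to
  // L_clinit_barrier_slow to re-enter the runtime.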
  // Class initialization barrier for static methods
  if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
    const Register method = temp;
    const Register klass  = temp;

    __ movptr(method, Address(cache, in_bytes(ResolvedMethodEntry::method_offset())));
    __ load_method_holder(klass, method);
    __ clinit_barrier(klass, nullptr /*L_fast_path*/, &L_clinit_barrier_slow);
  }
}

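// Resolve the field referenced by the current get/put bytecode if it has not
// been resolved yet; on return, cache points at the ResolvedFieldEntry.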
void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
                                                      Register cache,
                                                      Register index) {
  const Register temp = rbx;
  assert_different_registers(cache, index, temp);

  Label resolved;

  Bytecodes::Code code = bytecode();
  switch (code) {
  case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
  case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
  default: break;
  }

  assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
  __ load_field_entry(cache, index);
  if (byte_no == f1_byte) {
    __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedFieldEntry::get_code_offset())));
  } else {
    __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedFieldEntry::put_code_offset())));
  }
  __ cmpl(temp, code); // have we resolved this bytecode?
  __ jcc(Assembler::equal, resolved);

  // resolve first time through
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ movl(temp, code);
  __ call_VM(noreg, entry, temp);
  // Update registers with resolved info
  __ load_field_entry(cache, index);

  __ bind(resolved);
}

void TemplateTable::load_resolved_field_entry(Register obj,
                                              Register cache,
                                              Register tos_state,
                                              Register offset,
                                              Register flags,
                                              bool is_static = false) {
  assert_different_registers(cache, tos_state, flags, offset);

  // Field offset
  __ load_sized_value(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);

  // Flags
  __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));

  // ...
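
// From TemplateTable::load_invokedynamic_entry: method and appendix are the
// caller-supplied output registers for the resolved call site.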
  const Register cache = rcx;
  const Register index = rdx;
  assert_different_registers(method, appendix, cache, index);

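  // The runtime call below may safepoint; save the bytecode pointer into the
  // interpreter frame so the frame stays walkable and bcp can be restored.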
  __ save_bcp();

  Label resolved;

  __ load_resolved_indy_entry(cache, index);
  __ movptr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));

  // Compare the method to zero
  __ testptr(method, method);
  __ jcc(Assembler::notZero, resolved);

  Bytecodes::Code code = bytecode();

  // Call to the interpreter runtime to resolve invokedynamic
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ movl(method, code); // this is essentially Bytecodes::_invokedynamic
  __ call_VM(noreg, entry, method);
  // Update registers with resolved info
  __ load_resolved_indy_entry(cache, index);
  __ movptr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));

#ifdef ASSERT
  __ testptr(method, method);
  __ jcc(Assembler::notZero, resolved);
  __ stop("Should be resolved by now");
#endif // ASSERT
  __ bind(resolved);

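  // If linkage produced an appendix (the synthetic argument created by the
  // bootstrap machinery), push it as an extra trailing argument for the
  // adapter method.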
  Label L_no_push;
  // Check if there is an appendix
  __ load_unsigned_byte(index, Address(cache, in_bytes(ResolvedIndyEntry::flags_offset())));
  __ testl(index, (1 << ResolvedIndyEntry::has_appendix_shift));
  __ jcc(Assembler::zero, L_no_push);

  // Get appendix
  __ load_unsigned_short(index, Address(cache, in_bytes(ResolvedIndyEntry::resolved_references_index_offset())));
  // Push the appendix as a trailing parameter
  // ...
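
    // Tail of TemplateTable::_new(): at this point the fast path has allocated
    // and initialized the new object in rax.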
    }

    if (DTraceAllocProbes) {
      // Trigger dtrace event for fastpath
      __ push(atos);
      __ call_VM_leaf(
           CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
      __ pop(atos);
    }

    __ jmp(done);
  }

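  // Slow path: fast-path allocation failed or was not attempted. The runtime
  // resolves the class, initializes it if necessary, and allocates the object;
  // this may safepoint and trigger a GC.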
  // slow case
  __ bind(slow_case);
  __ pop(rcx);   // restore stack pointer to what it was when we came in.
  __ bind(slow_case_no_pop);

  __ get_constant_pool(c_rarg1);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
  __ verify_oop(rax);

  // continue
  __ bind(done);
}

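// newarray and anewarray have no interpreter fast path: both call straight into
// the runtime, with the element type taken from the bytecode stream (a primitive
// type tag for newarray, a constant pool index for anewarray).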
void TemplateTable::newarray() {
  transition(itos, atos);
  __ load_unsigned_byte(c_rarg1, at_bcp(1));
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, rax);
}

void TemplateTable::anewarray() {
  transition(itos, atos);

  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ get_constant_pool(c_rarg1);
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          c_rarg1, c_rarg2, rax);
}

  __ load_method_entry(cache, index);
  switch (byte_no) {
  case f1_byte:
    __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedMethodEntry::bytecode1_offset())));
    break;
  case f2_byte:
    __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedMethodEntry::bytecode2_offset())));
    break;
  default:
    ShouldNotReachHere();
  }
  __ cmpl(temp, code); // have we resolved this bytecode?
  __ jcc(Assembler::equal, resolved);

  // resolve first time through
  // Class initialization barrier slow path lands here as well.
  __ bind(L_clinit_barrier_slow);
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ movl(temp, code);
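  // call_VM_preemptable enters the runtime at a point where the thread can be
  // preempted while the resolution call blocks (e.g. a virtual thread can be
  // unmounted instead of pinning its carrier).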
  __ call_VM_preemptable(noreg, entry, temp);
  // Update registers with resolved info
  __ load_method_entry(cache, index);

  __ bind(resolved);

  // Class initialization barrier for static methods
  if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
    const Register method = temp;
    const Register klass  = temp;

    __ movptr(method, Address(cache, in_bytes(ResolvedMethodEntry::method_offset())));
    __ load_method_holder(klass, method);
    __ clinit_barrier(klass, nullptr /*L_fast_path*/, &L_clinit_barrier_slow);
  }
}

void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
                                                      Register cache,
                                                      Register index) {
  const Register temp = rbx;
  assert_different_registers(cache, index, temp);

  Label resolved;

  Bytecodes::Code code = bytecode();
  switch (code) {
  case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
  case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
  default: break;
  }

  assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
  __ load_field_entry(cache, index);
  if (byte_no == f1_byte) {
    __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedFieldEntry::get_code_offset())));
  } else {
    __ load_unsigned_byte(temp, Address(cache, in_bytes(ResolvedFieldEntry::put_code_offset())));
  }
  __ cmpl(temp, code); // have we resolved this bytecode?
  __ jcc(Assembler::equal, resolved);

  // resolve first time through
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ movl(temp, code);
  __ call_VM_preemptable(noreg, entry, temp);
  // Update registers with resolved info
  __ load_field_entry(cache, index);

  __ bind(resolved);
}

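// Unpack a ResolvedFieldEntry into the given registers: the field offset and
// flags are loaded here; the TOS state (and, for static fields, the holder
// mirror) are loaded in the elided remainder of the function.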
void TemplateTable::load_resolved_field_entry(Register obj,
                                              Register cache,
                                              Register tos_state,
                                              Register offset,
                                              Register flags,
                                              bool is_static = false) {
  assert_different_registers(cache, tos_state, flags, offset);

  // Field offset
  __ load_sized_value(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);

  // Flags
  __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));

  // ...

  const Register cache = rcx;
  const Register index = rdx;
  assert_different_registers(method, appendix, cache, index);

  __ save_bcp();

  Label resolved;

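  // An invokedynamic call site is resolved iff its ResolvedIndyEntry holds a
  // non-null adapter method; unresolved sites call into the runtime, which runs
  // the bootstrap method and fills in the entry.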
  __ load_resolved_indy_entry(cache, index);
  __ movptr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));

  // Compare the method to zero
  __ testptr(method, method);
  __ jcc(Assembler::notZero, resolved);

  Bytecodes::Code code = bytecode();

  // Call to the interpreter runtime to resolve invokedynamic
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ movl(method, code); // this is essentially Bytecodes::_invokedynamic
  __ call_VM_preemptable(noreg, entry, method);
  // Update registers with resolved info
  __ load_resolved_indy_entry(cache, index);
  __ movptr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));

#ifdef ASSERT
  __ testptr(method, method);
  __ jcc(Assembler::notZero, resolved);
  __ stop("Should be resolved by now");
#endif // ASSERT
  __ bind(resolved);

  Label L_no_push;
  // Check if there is an appendix
  __ load_unsigned_byte(index, Address(cache, in_bytes(ResolvedIndyEntry::flags_offset())));
  __ testl(index, (1 << ResolvedIndyEntry::has_appendix_shift));
  __ jcc(Assembler::zero, L_no_push);

  // Get appendix
  __ load_unsigned_short(index, Address(cache, in_bytes(ResolvedIndyEntry::resolved_references_index_offset())));
  // Push the appendix as a trailing parameter
  // ...
    }

    if (DTraceAllocProbes) {
      // Trigger dtrace event for fastpath
      __ push(atos);
      __ call_VM_leaf(
           CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
      __ pop(atos);
    }

    __ jmp(done);
  }

  // slow case
  __ bind(slow_case);
  __ pop(rcx);   // restore stack pointer to what it was when we came in.
  __ bind(slow_case_no_pop);

  __ get_constant_pool(c_rarg1);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ call_VM_preemptable(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
  __ verify_oop(rax);

  // continue
  __ bind(done);
}

void TemplateTable::newarray() {
  transition(itos, atos);
  __ load_unsigned_byte(c_rarg1, at_bcp(1));
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, rax);
}

void TemplateTable::anewarray() {
  transition(itos, atos);

  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ get_constant_pool(c_rarg1);
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          c_rarg1, c_rarg2, rax);
}