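  // Load-acquire the resolved bytecode; the membar below orders this load
  // before the subsequent loads of the resolved entry fields.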
  __ lbu(temp, Address(temp, 0));
  __ membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);

  __ mv(t0, (int) code);

  // Class initialization barrier for static methods
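  // invokestatic must not run before the method holder is initialized, so the
  // fast path is taken only when clinit_barrier sees a fully initialized klass.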
  if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
    __ bne(temp, t0, L_clinit_barrier_slow);  // have we resolved this bytecode?
    __ ld(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
    __ load_method_holder(temp, temp);
    __ clinit_barrier(temp, t0, &L_done, /*L_slow_path*/ nullptr);
    __ bind(L_clinit_barrier_slow);
  } else {
    __ beq(temp, t0, L_done);  // have we resolved this bytecode?
  }

  // resolve first time through
  // Class initialization barrier slow path lands here as well.
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mv(temp, (int) code);
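  // Resolution may block in the VM (class loading and initialization), so use
  // the preemptable VM call: a virtual thread can unmount here instead of
  // pinning its carrier thread.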
  __ call_VM_preemptable(noreg, entry, temp);

  // Update registers with resolved info
  __ load_method_entry(Rcache, index);
  // n.b. unlike x86, Rcache is now rcpool plus the indexed offset,
  // so all clients of this method must be modified accordingly
  __ bind(L_done);
}

void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
                                                      Register Rcache,
                                                      Register index) {
  const Register temp = x9;
  assert_different_registers(Rcache, index, temp);

  Label L_clinit_barrier_slow, L_done;

  Bytecodes::Code code = bytecode();
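  // The _nofast variants resolve as their canonical bytecode.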
  switch (code) {
    case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
    case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
    default: break;
  }

  assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
  __ load_field_entry(Rcache, index);
  if (byte_no == f1_byte) {
    __ la(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::get_code_offset())));
  } else {
    __ la(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::put_code_offset())));
  }
  // Load-acquire the bytecode to match the store-release in ResolvedFieldEntry::fill_in()
  __ lbu(temp, Address(temp, 0));
  __ membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);
  __ mv(t0, (int) code);  // have we resolved this bytecode?

  // Class initialization barrier for static fields
  if (VM_Version::supports_fast_class_init_checks() &&
      (bytecode() == Bytecodes::_getstatic || bytecode() == Bytecodes::_putstatic)) {
    const Register field_holder = temp;

    __ bne(temp, t0, L_clinit_barrier_slow);
    __ ld(field_holder, Address(Rcache, in_bytes(ResolvedFieldEntry::field_holder_offset())));
    __ clinit_barrier(field_holder, t0, &L_done, /*L_slow_path*/ nullptr);
    __ bind(L_clinit_barrier_slow);
  } else {
    __ beq(temp, t0, L_done);
  }

  // resolve first time through
  // Class initialization barrier slow path lands here as well.
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mv(temp, (int) code);
  __ call_VM_preemptable(noreg, entry, temp);

  // Update registers with resolved info
  __ load_field_entry(Rcache, index);
  __ bind(L_done);
}

void TemplateTable::load_resolved_field_entry(Register obj,
                                              Register cache,
                                              Register tos_state,
                                              Register offset,
                                              Register flags,
                                              bool is_static = false) {
  assert_different_registers(cache, tos_state, flags, offset);

  // Field offset
  __ load_sized_value(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);

  // Flags
  __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
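  // The flags byte carries the is_final and is_volatile bits used by the
  // getfield/putfield fast paths.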

  // ... (remainder of load_resolved_field_entry and intervening code elided;
  // the fragment below is the tail of TemplateTable::_new) ...

    } else {
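      // Initialize the mark word with the default (unlocked, no-hash) prototype.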
      __ mv(t0, (intptr_t)markWord::prototype().value());
      __ sd(t0, Address(x10, oopDesc::mark_offset_in_bytes()));
      __ store_klass_gap(x10, zr);  // zero klass gap for compressed oops
      __ store_klass(x10, x14);     // store klass last
    }

    if (DTraceAllocProbes) {
      // Trigger dtrace event for fastpath
      __ push(atos); // save the return value
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), x10);
      __ pop(atos); // restore the return value
    }
    __ j(done);
  }

  // slow case
  __ bind(slow_case);
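  // Pass the constant pool and the class's CP index to the runtime allocator.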
  __ get_constant_pool(c_rarg1);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ call_VM_preemptable(x10, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
  __ verify_oop(x10);

  // continue
  __ bind(done);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(MacroAssembler::StoreStore);
}

void TemplateTable::newarray() {
  transition(itos, atos);
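  // c_rarg1 = element type code (the bytecode's atype operand),
  // c_rarg2 = array length (TOS value, itos in x10)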
  __ load_unsigned_byte(c_rarg1, at_bcp(1));
  __ mv(c_rarg2, x10);
  call_VM(x10, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, c_rarg2);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(MacroAssembler::StoreStore);
}

void TemplateTable::anewarray() {
  transition(itos, atos);