
src/hotspot/cpu/s390/templateInterpreterGenerator_s390.cpp


*** 1,8 ****
/*
! * Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
! * Copyright (c) 2016, 2019, SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
--- 1,8 ----
/*
! * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
! * Copyright (c) 2016, 2018, SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
*** 819,829 ****
void TemplateInterpreterGenerator::generate_stack_overflow_check(Register frame_size, Register tmp1) {
  Register tmp2 = Z_R1_scratch;
  const int page_size = os::vm_page_size();
  NearLabel after_frame_check;

! BLOCK_COMMENT("stack_overflow_check {");

  assert_different_registers(frame_size, tmp1);

  // Stack banging is sufficient overflow check if frame_size < page_size.
  if (Immediate::is_uimm(page_size, 15)) {
--- 819,829 ----
void TemplateInterpreterGenerator::generate_stack_overflow_check(Register frame_size, Register tmp1) {
  Register tmp2 = Z_R1_scratch;
  const int page_size = os::vm_page_size();
  NearLabel after_frame_check;

! BLOCK_COMMENT("counter_overflow {");

  assert_different_registers(frame_size, tmp1);

  // Stack banging is sufficient overflow check if frame_size < page_size.
  if (Immediate::is_uimm(page_size, 15)) {
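For orientation only: the context above relies on the stack bang the interpreter performs elsewhere, so frames smaller than one page skip the explicit limit comparison. A minimal host-C++ sketch of that decision, with hypothetical names that are not part of this webrev:

#include <cstdint>

// Illustrative helper (assumption, not generator code): mirrors the branch
// to 'after_frame_check' that is taken when frame_size < page_size.
static bool needs_explicit_overflow_check(uint64_t frame_size_bytes,
                                          uint64_t page_size_bytes) {
  return frame_size_bytes >= page_size_bytes;
}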
*** 881,891 ****
  __ z_br(tmp1);

  // If you get to here, then there is enough stack space.
  __ bind(after_frame_check);

! BLOCK_COMMENT("} stack_overflow_check");
}

// Allocate monitor and lock method (asm interpreter).
//
// Args:
--- 881,891 ----
  __ z_br(tmp1);

  // If you get to here, then there is enough stack space.
  __ bind(after_frame_check);

! BLOCK_COMMENT("} counter_overflow");
}

// Allocate monitor and lock method (asm interpreter).
//
// Args:
*** 925,937 ****
  __ z_bru(done);

  __ bind(static_method);

  // Lock the java mirror.
! // Load mirror from interpreter frame.
! __ z_lg(object, _z_ijava_state_neg(mirror), Z_fp);
!
#ifdef ASSERT
  {
    NearLabel L;
    __ compare64_and_branch(object, (intptr_t) 0, Assembler::bcondNotEqual, L);
    reentry = __ stop_chain_static(reentry, "synchronization object is NULL");
--- 925,935 ----
  __ z_bru(done);

  __ bind(static_method);

  // Lock the java mirror.
! __ load_mirror(object, method);
#ifdef ASSERT
  {
    NearLabel L;
    __ compare64_and_branch(object, (intptr_t) 0, Assembler::bcondNotEqual, L);
    reentry = __ stop_chain_static(reentry, "synchronization object is NULL");
*** 991,1014 ****
  //=============================================================================
  // Allocate space for locals other than the parameters, the
  // interpreter state, monitors, and the expression stack.

! const Register local_count  = Z_ARG5;
! const Register fp           = Z_tmp_2;
! const Register const_method = Z_ARG1;

  BLOCK_COMMENT("generate_fixed_frame {");
  {
  // local registers
  const Register top_frame_size = Z_ARG2;
  const Register sp_after_resize = Z_ARG3;
  const Register max_stack = Z_ARG4;

! __ z_lg(const_method, Address(Z_method, Method::const_offset()));
! __ z_llgh(max_stack, Address(const_method, ConstMethod::size_of_parameters_offset()));
! __ z_sllg(Z_locals /*parameter_count bytes*/, max_stack /*parameter_count*/, LogBytesPerWord);

  if (native_call) {
    // If we're calling a native method, we replace max_stack (which is
    // zero) with space for the worst-case signature handler varargs
    // vector, which is:
--- 989,1012 ----
  //=============================================================================
  // Allocate space for locals other than the parameters, the
  // interpreter state, monitors, and the expression stack.

! const Register local_count = Z_ARG5;
! const Register fp = Z_tmp_2;

  BLOCK_COMMENT("generate_fixed_frame {");
+
  {
  // local registers
  const Register top_frame_size = Z_ARG2;
  const Register sp_after_resize = Z_ARG3;
  const Register max_stack = Z_ARG4;

! // local_count = method->constMethod->max_locals();
! __ z_lg(Z_R1_scratch, Address(Z_method, Method::const_offset()));
! __ z_llgh(local_count, Address(Z_R1_scratch, ConstMethod::size_of_locals_offset()));

  if (native_call) {
    // If we're calling a native method, we replace max_stack (which is
    // zero) with space for the worst-case signature handler varargs
    // vector, which is:
*** 1024,1033 ****
--- 1022,1034 ----
    // still going to cut the stack back by the ABI register parameter
    // count so as to get SP+16 pointing at the ABI outgoing parameter
    // area, so we need to allocate at least that much even though we're
    // going to throw it away.
    //
+
+   __ z_lg(Z_R1_scratch, Address(Z_method, Method::const_offset()));
+   __ z_llgh(max_stack, Address(Z_R1_scratch, ConstMethod::size_of_parameters_offset()));
    __ add2reg(max_stack, 2);

    NearLabel passing_args_on_stack;

    // max_stack in bytes
*** 1039,1056 ****
    __ load_const_optimized(max_stack, argument_registers_in_bytes);

    __ bind(passing_args_on_stack);
  } else {
    // !native_call
!   // local_count = method->constMethod->max_locals();
!   __ z_llgh(local_count, Address(const_method, ConstMethod::size_of_locals_offset()));

    // Calculate number of non-parameter locals (in slots):
!   __ z_sgr(local_count, max_stack);

    // max_stack = method->max_stack();
!   __ z_llgh(max_stack, Address(const_method, ConstMethod::max_stack_offset()));

    // max_stack in bytes
    __ z_sllg(max_stack, max_stack, LogBytesPerWord);
  }

  // Resize (i.e. normally shrink) the top frame F1 ...
--- 1040,1057 ----
    __ load_const_optimized(max_stack, argument_registers_in_bytes);

    __ bind(passing_args_on_stack);
  } else {
    // !native_call
!   __ z_lg(max_stack, method_(const));

    // Calculate number of non-parameter locals (in slots):
!   __ z_lg(Z_R1_scratch, Address(Z_method, Method::const_offset()));
!   __ z_sh(local_count, Address(Z_R1_scratch, ConstMethod::size_of_parameters_offset()));

    // max_stack = method->max_stack();
!   __ z_llgh(max_stack, Address(max_stack, ConstMethod::max_stack_offset()));

    // max_stack in bytes
    __ z_sllg(max_stack, max_stack, LogBytesPerWord);
  }

  // Resize (i.e. normally shrink) the top frame F1 ...
*** 1086,1112 ****
  // sp_after_resize = Z_esp - delta
  //
  // delta = PARENT_IJAVA_FRAME_ABI + (locals_count - params_count)

  __ add2reg(sp_after_resize, (Interpreter::stackElementSize) - (frame::z_parent_ijava_frame_abi_size), Z_esp);
! if (!native_call) {
!   __ z_sllg(Z_R0_scratch, local_count, LogBytesPerWord); // Params have already been subtracted from local_count.
!   __ z_slgr(sp_after_resize, Z_R0_scratch);
! }

  // top_frame_size = TOP_IJAVA_FRAME_ABI + max_stack + size of interpreter state
  __ add2reg(top_frame_size,
             frame::z_top_ijava_frame_abi_size +
!            frame::z_ijava_state_size,
             max_stack);

  if (!native_call) {
    // Stack overflow check.
    // Native calls don't need the stack size check since they have no
    // expression stack and the arguments are already on the stack and
    // we only add a handful of words to the stack.
!   Register frame_size = max_stack; // Reuse the register for max_stack.
    __ z_lgr(frame_size, Z_SP);
    __ z_sgr(frame_size, sp_after_resize);
    __ z_agr(frame_size, top_frame_size);
    generate_stack_overflow_check(frame_size, fp/*tmp1*/);
  }
--- 1087,1112 ----
  // sp_after_resize = Z_esp - delta
  //
  // delta = PARENT_IJAVA_FRAME_ABI + (locals_count - params_count)

  __ add2reg(sp_after_resize, (Interpreter::stackElementSize) - (frame::z_parent_ijava_frame_abi_size), Z_esp);
! __ z_sllg(Z_R0_scratch, local_count, LogBytesPerWord); // Params have already been subtracted from local_count.
! __ z_slgr(sp_after_resize, Z_R0_scratch);

  // top_frame_size = TOP_IJAVA_FRAME_ABI + max_stack + size of interpreter state
  __ add2reg(top_frame_size,
             frame::z_top_ijava_frame_abi_size +
!            frame::z_ijava_state_size +
!            frame::interpreter_frame_monitor_size() * wordSize,
             max_stack);

  if (!native_call) {
    // Stack overflow check.
    // Native calls don't need the stack size check since they have no
    // expression stack and the arguments are already on the stack and
    // we only add a handful of words to the stack.
!   Register frame_size = max_stack; // Reuse the regiser for max_stack.
    __ z_lgr(frame_size, Z_SP);
    __ z_sgr(frame_size, sp_after_resize);
    __ z_agr(frame_size, top_frame_size);
    generate_stack_overflow_check(frame_size, fp/*tmp1*/);
  }
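For orientation only: a hedged, plain-C++ sketch of the frame-size arithmetic that both sides of the hunk above feed into generate_stack_overflow_check() on the non-native path. The parameter names are illustrative stand-ins for the register values and constants used by the generator; the monitor term is present only on the right-hand side of the hunk.

#include <cstdint>

static intptr_t interpreter_frame_size_sketch(intptr_t z_sp,                  // current SP
                                              intptr_t z_esp,                 // caller expression stack top
                                              intptr_t parent_abi_size,       // frame::z_parent_ijava_frame_abi_size
                                              intptr_t top_abi_size,          // frame::z_top_ijava_frame_abi_size
                                              intptr_t ijava_state_size,      // frame::z_ijava_state_size
                                              intptr_t stack_element_size,    // Interpreter::stackElementSize
                                              intptr_t nonparam_locals_bytes, // (max_locals - num_params) * wordSize
                                              intptr_t max_stack_bytes) {     // max_stack * wordSize
  // Where the resized caller frame ends up; the delta includes the
  // non-parameter locals that live below the parameters.
  intptr_t sp_after_resize = z_esp + stack_element_size - parent_abi_size
                             - nonparam_locals_bytes;
  // New top frame: ABI area + interpreter state (+ monitor slots on the
  // right-hand side of the hunk) + expression stack.
  intptr_t top_frame_size = top_abi_size + ijava_state_size + max_stack_bytes;
  // Total growth handed to generate_stack_overflow_check().
  return (z_sp - sp_after_resize) + top_frame_size;
}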
*** 1134,1158 ****
  __ load_const_optimized(local_addr, frame::z_istate_magic_number);
  __ z_stg(local_addr, _z_ijava_state_neg(magic), fp);
#endif

  // Save sender SP from F1 (i.e. before it was potentially modified by an
! // adapter) into F0's interpreter state. We use it as well to revert
  // resizing the frame above.
  __ z_stg(Z_R10, _z_ijava_state_neg(sender_sp), fp);

! // Load cp cache and save it at the end of this block.
! __ z_lg(Z_R1_scratch, Address(const_method, ConstMethod::constants_offset()));
  __ z_lg(Z_R1_scratch, Address(Z_R1_scratch, ConstantPool::cache_offset_in_bytes()));

  // z_ijava_state->method = method;
  __ z_stg(Z_method, _z_ijava_state_neg(method), fp);

  // Point locals at the first argument. Method's locals are the
  // parameters on top of caller's expression stack.
  // Tos points past last Java argument.
  __ z_agr(Z_locals, Z_esp);
  // z_ijava_state->locals - i*BytesPerWord points to i-th Java local (i starts at 0)
  // z_ijava_state->locals = Z_esp + parameter_count bytes
  __ z_stg(Z_locals, _z_ijava_state_neg(locals), fp);
--- 1134,1163 ----
  __ load_const_optimized(local_addr, frame::z_istate_magic_number);
  __ z_stg(local_addr, _z_ijava_state_neg(magic), fp);
#endif

  // Save sender SP from F1 (i.e. before it was potentially modified by an
! // adapter) into F0's interpreter state. We us it as well to revert
  // resizing the frame above.
  __ z_stg(Z_R10, _z_ijava_state_neg(sender_sp), fp);

! // Load cp cache and save it at the and of this block.
! __ z_lg(Z_R1_scratch, Address(Z_method, Method::const_offset()));
! __ z_lg(Z_R1_scratch, Address(Z_R1_scratch, ConstMethod::constants_offset()));
  __ z_lg(Z_R1_scratch, Address(Z_R1_scratch, ConstantPool::cache_offset_in_bytes()));

  // z_ijava_state->method = method;
  __ z_stg(Z_method, _z_ijava_state_neg(method), fp);

  // Point locals at the first argument. Method's locals are the
  // parameters on top of caller's expression stack.
  // Tos points past last Java argument.
+ __ z_lg(Z_locals, Address(Z_method, Method::const_offset()));
+ __ z_llgh(Z_locals /*parameter_count words*/,
+           Address(Z_locals, ConstMethod::size_of_parameters_offset()));
+ __ z_sllg(Z_locals /*parameter_count bytes*/, Z_locals /*parameter_count*/, LogBytesPerWord);
  __ z_agr(Z_locals, Z_esp);
  // z_ijava_state->locals - i*BytesPerWord points to i-th Java local (i starts at 0)
  // z_ijava_state->locals = Z_esp + parameter_count bytes
  __ z_stg(Z_locals, _z_ijava_state_neg(locals), fp);
*** 1176,1186 ****
  // Initialize z_ijava_state->bcp and Z_bcp.
  if (native_call) {
    __ clear_reg(Z_bcp); // Must initialize. Will get written into frame where GC reads it.
  } else {
!   __ add2reg(Z_bcp, in_bytes(ConstMethod::codes_offset()), const_method);
  }
  __ z_stg(Z_bcp, _z_ijava_state_neg(bcp), fp);

  // no monitors and empty operand stack
  // => z_ijava_state->monitors points to the top slot in IJAVA_STATE.
--- 1181,1192 ----
  // Initialize z_ijava_state->bcp and Z_bcp.
  if (native_call) {
    __ clear_reg(Z_bcp); // Must initialize. Will get written into frame where GC reads it.
  } else {
!   __ z_lg(Z_bcp, method_(const));
!   __ add2reg(Z_bcp, in_bytes(ConstMethod::codes_offset()));
  }
  __ z_stg(Z_bcp, _z_ijava_state_neg(bcp), fp);

  // no monitors and empty operand stack
  // => z_ijava_state->monitors points to the top slot in IJAVA_STATE.
*** 1194,1218 ****
  // z_ijava_state->cpoolCache = Z_R1_scratch (see load above);
  __ z_stg(Z_R1_scratch, _z_ijava_state_neg(cpoolCache), fp);

  // Get mirror and store it in the frame as GC root for this Method*.
! __ load_mirror_from_const_method(Z_R1_scratch, const_method);
  __ z_stg(Z_R1_scratch, _z_ijava_state_neg(mirror), fp);

  BLOCK_COMMENT("} generate_fixed_frame: initialize interpreter state");

  //=============================================================================
  if (!native_call) {
    // Local_count is already num_locals_slots - num_param_slots.
!   // Start of locals: local_addr = Z_locals - locals size + 1 slot
!   __ z_llgh(Z_R0_scratch, Address(const_method, ConstMethod::size_of_locals_offset()));
!   __ add2reg(local_addr, BytesPerWord, Z_locals);
    __ z_sllg(Z_R0_scratch, Z_R0_scratch, LogBytesPerWord);
    __ z_sgr(local_addr, Z_R0_scratch);

!   __ Clear_Array(local_count, local_addr, Z_ARG2);
  }
  }
--- 1200,1265 ----
  // z_ijava_state->cpoolCache = Z_R1_scratch (see load above);
  __ z_stg(Z_R1_scratch, _z_ijava_state_neg(cpoolCache), fp);

  // Get mirror and store it in the frame as GC root for this Method*.
! __ load_mirror(Z_R1_scratch, Z_method);
  __ z_stg(Z_R1_scratch, _z_ijava_state_neg(mirror), fp);

  BLOCK_COMMENT("} generate_fixed_frame: initialize interpreter state");

  //=============================================================================
  if (!native_call) {
+   // Fill locals with 0x0s.
+   NearLabel locals_zeroed;
+   NearLabel doXC;
+
    // Local_count is already num_locals_slots - num_param_slots.
!   __ compare64_and_branch(local_count, (intptr_t)0L, Assembler::bcondNotHigh, locals_zeroed);
!
!   // Advance local_addr to point behind locals (creates positive incr. in loop).
!   __ z_lg(Z_R1_scratch, Address(Z_method, Method::const_offset()));
!   __ z_llgh(Z_R0_scratch, Address(Z_R1_scratch, ConstMethod::size_of_locals_offset()));
!   __ add2reg(Z_R0_scratch, -1);
!
!   __ z_lgr(local_addr/*locals*/, Z_locals);
    __ z_sllg(Z_R0_scratch, Z_R0_scratch, LogBytesPerWord);
+   __ z_sllg(local_count, local_count, LogBytesPerWord); // Local_count are non param locals.
    __ z_sgr(local_addr, Z_R0_scratch);

!   if (VM_Version::has_Prefetch()) {
!     __ z_pfd(0x02, 0, Z_R0, local_addr);
!     __ z_pfd(0x02, 256, Z_R0, local_addr);
!   }
!
!   // Can't optimise for Z10 using "compare and branch" (immediate value is too big).
!   __ z_cghi(local_count, 256);
!   __ z_brnh(doXC);
!
!   // MVCLE: Initialize if quite a lot locals.
!   // __ bind(doMVCLE);
!   __ z_lgr(Z_R0_scratch, local_addr);
!   __ z_lgr(Z_R1_scratch, local_count);
!   __ clear_reg(Z_ARG2); // Src len of MVCLE is zero.
!
!   __ MacroAssembler::move_long_ext(Z_R0_scratch, Z_ARG1, 0);
!   __ z_bru(locals_zeroed);
!
!   Label XC_template;
!   __ bind(XC_template);
!   __ z_xc(0, 0, local_addr, 0, local_addr);
!
!   __ bind(doXC);
!   __ z_bctgr(local_count, Z_R0); // Get #bytes-1 for EXECUTE.
!   if (VM_Version::has_ExecuteExtensions()) {
!     __ z_exrl(local_count, XC_template); // Execute XC with variable length.
!   } else {
!     __ z_larl(Z_R1_scratch, XC_template);
!     __ z_ex(local_count, 0, Z_R0, Z_R1_scratch); // Execute XC with variable length.
!   }
!
!   __ bind(locals_zeroed);
  }
  }
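For orientation only: the right-hand side of the hunk above clears the non-parameter locals inline instead of calling Clear_Array(). A hedged host-C++ sketch of that strategy (illustrative only, not the generator's code): nothing to do for zero bytes, ranges of up to 256 bytes are cleared by a single XC whose length is patched in via EX/EXRL, and larger ranges fall back to MVCLE with a zero-length source operand, which pads the destination with the (zero) pad byte.

#include <cstddef>
#include <cstring>

static void clear_locals_sketch(unsigned char* locals_start, size_t len_bytes) {
  if (len_bytes == 0) {
    return;                            // local_count <= 0: branch straight to locals_zeroed
  }
  if (len_bytes <= 256) {
    // Stands in for EX/EXRL of the XC_template with length field len_bytes - 1.
    std::memset(locals_start, 0, len_bytes);
  } else {
    // Stands in for MVCLE with the source length register (Z_ARG2) cleared to zero.
    std::memset(locals_start, 0, len_bytes);
  }
}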
*** 1481,1492 ****
  // Pass mirror handle if static call.
  {
    Label method_is_not_static;
    __ testbit(method2_(Rmethod, access_flags), JVM_ACC_STATIC_BIT);
    __ z_bfalse(method_is_not_static);
!   // Load mirror from interpreter frame.
!   __ z_lg(Z_R1, _z_ijava_state_neg(mirror), Z_fp);
    // z_ijava_state.oop_temp = pool_holder->klass_part()->java_mirror();
    __ z_stg(Z_R1, oop_tmp_offset, Z_fp);
    // Pass handle to mirror as 2nd argument to JNI method.
    __ add2reg(Z_ARG2, oop_tmp_offset, Z_fp);
    __ bind(method_is_not_static);
--- 1528,1539 ----
  // Pass mirror handle if static call.
  {
    Label method_is_not_static;
    __ testbit(method2_(Rmethod, access_flags), JVM_ACC_STATIC_BIT);
    __ z_bfalse(method_is_not_static);
!   // Get mirror.
!   __ load_mirror(Z_R1, Rmethod);
    // z_ijava_state.oop_temp = pool_holder->klass_part()->java_mirror();
    __ z_stg(Z_R1, oop_tmp_offset, Z_fp);
    // Pass handle to mirror as 2nd argument to JNI method.
    __ add2reg(Z_ARG2, oop_tmp_offset, Z_fp);
    __ bind(method_is_not_static);