1 /*
   2  * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 #include "precompiled.hpp"
  25 #include "asm/macroAssembler.inline.hpp"
  26 #include "code/codeBlob.hpp"
  27 #include "gc/z/zBarrier.inline.hpp"
  28 #include "gc/z/zBarrierSet.hpp"
  29 #include "gc/z/zBarrierSetAssembler.hpp"
  30 #include "gc/z/zBarrierSetRuntime.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #include "runtime/stubCodeGenerator.hpp"
  33 #include "utilities/macros.hpp"
  34 #ifdef COMPILER1
  35 #include "c1/c1_LIRAssembler.hpp"
  36 #include "c1/c1_MacroAssembler.hpp"
  37 #include "gc/z/c1/zBarrierSetC1.hpp"
  38 #endif // COMPILER1
  39 
// Value-initialize both stub tables (one entry per register encoding);
// the actual stubs are generated later by barrier_stubs_init().
ZBarrierSetAssembler::ZBarrierSetAssembler() :
    _load_barrier_slow_stub(),
    _load_barrier_weak_slow_stub() {}
  43 
  44 #ifdef PRODUCT
  45 #define BLOCK_COMMENT(str) /* nothing */
  46 #else
  47 #define BLOCK_COMMENT(str) __ block_comment(str)
  48 #endif
  49 
  50 #undef __
  51 #define __ masm->
  52 
// Emits a two-argument leaf call to 'entry_point', moving arg0/arg1 into the
// first two C calling-convention registers (c_rarg0/c_rarg1). The shuffle is
// order-sensitive: if arg1 already lives in c_rarg0 it must be moved (or
// exchanged) out before c_rarg0 is overwritten.
static void call_vm(MacroAssembler* masm,
                    address entry_point,
                    Register arg0,
                    Register arg1) {
  // Setup arguments
  if (arg1 == c_rarg0) {
    if (arg0 == c_rarg1) {
      // Arguments are exactly swapped - exchange in a single instruction
      __ xchgptr(c_rarg1, c_rarg0);
    } else {
      // Move arg1 out of c_rarg0 first, then it is safe to write c_rarg0
      __ movptr(c_rarg1, arg1);
      __ movptr(c_rarg0, arg0);
    }
  } else {
    // arg1 is not in c_rarg0, so c_rarg0 can be written first
    if (arg0 != c_rarg0) {
      __ movptr(c_rarg0, arg0);
    }
    if (arg1 != c_rarg1) {
      __ movptr(c_rarg1, arg1);
    }
  }

  // Call VM
  __ MacroAssembler::call_VM_leaf_base(entry_point, 2);
}
  77 
// Emits an oop load with a ZGC load barrier: the fast path loads the oop and
// tests it against the thread-local address bad mask; if any bad bit is set,
// the slow path calls the barrier runtime to heal the oop, leaving the
// good oop in 'dst'.
void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::load_at {");

  // Allocate scratch register. If the caller supplied no tmp1, borrow r12
  // and preserve its original value around this sequence.
  Register scratch = tmp1;
  if (tmp1 == noreg) {
    scratch = r12;
    __ push(scratch);
  }

  assert_different_registers(dst, scratch);

  Label done;

  //
  // Fast Path
  //

  // Load address (kept in scratch so the slow path can pass the field
  // address to the runtime)
  __ lea(scratch, src);

  // Load oop at address
  __ movptr(dst, Address(scratch, 0));

  // Test address bad mask; a zero result means the oop is good and no
  // barrier work is needed
  __ testptr(dst, address_bad_mask_from_thread(r15_thread));
  __ jcc(Assembler::zero, done);

  //
  // Slow path
  //

  // Save registers (all caller-saved GP registers except rax, which is
  // handled specially below since it carries the runtime's return value)
  __ push(rax);
  __ push(rcx);
  __ push(rdx);
  __ push(rdi);
  __ push(rsi);
  __ push(r8);
  __ push(r9);
  __ push(r10);
  __ push(r11);

  // We may end up here from generate_native_wrapper, then the method may have
  // floats as arguments, and we must spill them before calling the VM runtime
  // leaf. From the interpreter all floats are passed on the stack.
  assert(Argument::n_float_register_parameters_j == 8, "Assumption");
  const int xmm_size = wordSize * 2;
  const int xmm_spill_size = xmm_size * Argument::n_float_register_parameters_j;
  __ subptr(rsp, xmm_spill_size);
  __ movdqu(Address(rsp, xmm_size * 7), xmm7);
  __ movdqu(Address(rsp, xmm_size * 6), xmm6);
  __ movdqu(Address(rsp, xmm_size * 5), xmm5);
  __ movdqu(Address(rsp, xmm_size * 4), xmm4);
  __ movdqu(Address(rsp, xmm_size * 3), xmm3);
  __ movdqu(Address(rsp, xmm_size * 2), xmm2);
  __ movdqu(Address(rsp, xmm_size * 1), xmm1);
  __ movdqu(Address(rsp, xmm_size * 0), xmm0);

  // Call VM: arg0 = bad oop (dst), arg1 = field address (scratch);
  // the healed oop is returned in rax
  call_vm(masm, ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), dst, scratch);

  // Restore registers
  __ movdqu(xmm0, Address(rsp, xmm_size * 0));
  __ movdqu(xmm1, Address(rsp, xmm_size * 1));
  __ movdqu(xmm2, Address(rsp, xmm_size * 2));
  __ movdqu(xmm3, Address(rsp, xmm_size * 3));
  __ movdqu(xmm4, Address(rsp, xmm_size * 4));
  __ movdqu(xmm5, Address(rsp, xmm_size * 5));
  __ movdqu(xmm6, Address(rsp, xmm_size * 6));
  __ movdqu(xmm7, Address(rsp, xmm_size * 7));
  __ addptr(rsp, xmm_spill_size);

  __ pop(r11);
  __ pop(r10);
  __ pop(r9);
  __ pop(r8);
  __ pop(rsi);
  __ pop(rdi);
  __ pop(rdx);
  __ pop(rcx);

  if (dst == rax) {
    // Result is already in the right place; discard the saved rax copy
    __ addptr(rsp, wordSize);
  } else {
    // Move the healed oop into dst, then restore the caller's rax
    __ movptr(dst, rax);
    __ pop(rax);
  }

  __ bind(done);

  // Restore scratch register
  if (tmp1 == noreg) {
    __ pop(scratch);
  }

  BLOCK_COMMENT("} ZBarrierSetAssembler::load_at");
}
 188 
 189 #ifdef ASSERT
 190 
// Debug-only store (this definition is compiled only under the enclosing
// #ifdef ASSERT). ZGC needs no store barrier, but in debug builds we verify
// that the oop being stored has no bad mask bits set before delegating to
// the plain store.
void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
                                    DecoratorSet decorators,
                                    BasicType type,
                                    Address dst,
                                    Register src,
                                    Register tmp1,
                                    Register tmp2,
                                    Register tmp3) {
  BLOCK_COMMENT("ZBarrierSetAssembler::store_at {");

  assert(type != T_VALUETYPE, "Not supported yet");
  // Verify oop store
  if (type == T_OBJECT || type == T_ARRAY) {
    // Note that src could be noreg, which means we
    // are storing null and can skip verification.
    if (src != noreg) {
      Label done;
      // A good oop has no bits in common with the thread's address bad mask
      __ testptr(src, address_bad_mask_from_thread(r15_thread));
      __ jcc(Assembler::zero, done);
      __ stop("Verify oop store failed");
      __ should_not_reach_here();
      __ bind(done);
    }
  }

  // Store value
  BarrierSetAssembler::store_at(masm, decorators, type, dst, src, tmp1, tmp2, tmp3);

  BLOCK_COMMENT("} ZBarrierSetAssembler::store_at");
}
 221 
 222 #endif // ASSERT
 223 
// Emits a call to the runtime that applies the load barrier to every element
// of the source oop array before the copy starts, so the copy itself can run
// without per-element barriers.
void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              BasicType type,
                                              Register src,
                                              Register dst,
                                              Register count) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

  // Save registers (all GP registers; simpler than tracking liveness here)
  __ pusha();

  // Call VM: arg0 = array address, arg1 = element count
  call_vm(masm, ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), src, count);

  // Restore registers
  __ popa();

  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}
 248 
// Resolves a jobject from native code without transitioning to the VM.
// After the generic resolve, the resulting oop is checked against the bad
// mask (read via the JNIEnv, since r15 may not hold the thread here); a bad
// oop takes the caller-provided slow path instead.
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register obj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, obj, tmp, slowpath);

  // Test address bad mask
  __ testptr(obj, address_bad_mask_from_jni_env(jni_env));
  __ jcc(Assembler::notZero, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}
 265 
 266 #ifdef COMPILER1
 267 
 268 #undef __
 269 #define __ ce->masm()->
 270 
// Emits the C1 fast-path test: sets the zero flag iff 'ref' has no bad mask
// bits; the caller branches to the barrier stub on a non-zero result.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  __ testptr(ref->as_register(), address_bad_mask_from_thread(r15_thread));
}
 275 
// Emits the out-of-line C1 slow-path stub: passes the bad oop and its field
// address to the shared runtime stub, then moves the healed oop (returned in
// rax) into the stub's result register.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save rax unless it is the result or tmp register
  // (in either of those cases it is dead/clobberable anyway)
  if (ref != rax && tmp != rax) {
    __ push(rax);
  }

  // Setup arguments and call runtime stub.
  // Reserve two stack slots for the parameters written by store_parameter().
  __ subptr(rsp, 2 * BytesPerWord);
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);
  __ call(RuntimeAddress(stub->runtime_stub()));
  __ addptr(rsp, 2 * BytesPerWord);

  // Verify result
  __ verify_oop(rax, "Bad oop");

  // Move result into place
  if (ref != rax) {
    __ movptr(ref, rax);
  }

  // Restore rax unless it is the result or tmp register
  if (ref != rax && tmp != rax) {
    __ pop(rax);
  }

  // Stub exit
  __ jmp(*stub->continuation());
}
 324 
 325 #undef __
 326 #define __ sasm->
 327 
// Generates the shared C1 runtime stub that the per-site barrier stubs call.
// Loads the (bad oop, field address) parameters stored by the caller, calls
// the barrier runtime, and returns with the healed oop left in rax (all
// other live registers are restored).
void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  // Enter and save registers
  __ enter();
  __ save_live_registers_no_oop_map(true /* save_fpu_registers */);

  // Setup arguments (stored by generate_c1_load_barrier_stub:
  // slot 0 = bad oop, slot 1 = field address)
  __ load_parameter(1, c_rarg1);
  __ load_parameter(0, c_rarg0);

  // Call VM
  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);

  // Restore registers and return (rax carries the healed oop)
  __ restore_live_registers_except_rax(true /* restore_fpu_registers */);
  __ leave();
  __ ret(0);
}
 346 
 347 #endif // COMPILER1
 348 
 349 #undef __
 350 #define __ cgen->assembler()->
 351 
 352 // Generates a register specific stub for calling
 353 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 354 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
 355 //
 356 // The raddr register serves as both input and output for this stub. When the stub is
 357 // called the raddr register contains the object field address (oop*) where the bad oop
 358 // was loaded from, which caused the slow path to be taken. On return from the stub the
 359 // raddr register contains the good/healed oop returned from
 360 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
 361 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
 362 static address generate_load_barrier_stub(StubCodeGenerator* cgen, Register raddr, DecoratorSet decorators) {
 363   // Don't generate stub for invalid registers
 364   if (raddr == rsp || raddr == r15) {
 365     return NULL;
 366   }
 367 
 368   // Create stub name
 369   char name[64];
 370   const bool weak = (decorators & ON_WEAK_OOP_REF) != 0;
 371   os::snprintf(name, sizeof(name), "zgc_load_barrier%s_stub_%s", weak ? "_weak" : "", raddr->name());
 372 
 373   __ align(CodeEntryAlignment);
 374   StubCodeMark mark(cgen, "StubRoutines", os::strdup(name, mtCode));
 375   address start = __ pc();
 376 
 377   // Save live registers
 378   if (raddr != rax) {
 379     __ push(rax);
 380   }
 381   if (raddr != rcx) {
 382     __ push(rcx);
 383   }
 384   if (raddr != rdx) {
 385     __ push(rdx);
 386   }
 387   if (raddr != rsi) {
 388     __ push(rsi);
 389   }
 390   if (raddr != rdi) {
 391     __ push(rdi);
 392   }
 393   if (raddr != r8) {
 394     __ push(r8);
 395   }
 396   if (raddr != r9) {
 397     __ push(r9);
 398   }
 399   if (raddr != r10) {
 400     __ push(r10);
 401   }
 402   if (raddr != r11) {
 403     __ push(r11);
 404   }
 405 
 406   // Setup arguments
 407   if (raddr != c_rarg1) {
 408     __ movq(c_rarg1, raddr);
 409   }
 410   __ movq(c_rarg0, Address(raddr, 0));
 411 
 412   // Call barrier function
 413   __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);
 414 
 415   // Move result returned in rax to raddr, if needed
 416   if (raddr != rax) {
 417     __ movq(raddr, rax);
 418   }
 419 
 420   // Restore saved registers
 421   if (raddr != r11) {
 422     __ pop(r11);
 423   }
 424   if (raddr != r10) {
 425     __ pop(r10);
 426   }
 427   if (raddr != r9) {
 428     __ pop(r9);
 429   }
 430   if (raddr != r8) {
 431     __ pop(r8);
 432   }
 433   if (raddr != rdi) {
 434     __ pop(rdi);
 435   }
 436   if (raddr != rsi) {
 437     __ pop(rsi);
 438   }
 439   if (raddr != rdx) {
 440     __ pop(rdx);
 441   }
 442   if (raddr != rcx) {
 443     __ pop(rcx);
 444   }
 445   if (raddr != rax) {
 446     __ pop(rax);
 447   }
 448 
 449   __ ret(0);
 450 
 451   return start;
 452 }
 453 
 454 #undef __
 455 
 456 static void barrier_stubs_init_inner(const char* label, const DecoratorSet decorators, address* stub) {
 457   const int nregs = RegisterImpl::number_of_registers;
 458   const int code_size = nregs * 128; // Rough estimate of code size
 459 
 460   ResourceMark rm;
 461 
 462   CodeBuffer buf(BufferBlob::create(label, code_size));
 463   StubCodeGenerator cgen(&buf);
 464 
 465   for (int i = 0; i < nregs; i++) {
 466     const Register reg = as_Register(i);
 467     stub[i] = generate_load_barrier_stub(&cgen, reg, decorators);
 468   }
 469 }
 470 
// Generates both stub tables: strong (ON_STRONG_OOP_REF) and weak
// (ON_WEAK_OOP_REF) load-barrier stubs, one per register.
void ZBarrierSetAssembler::barrier_stubs_init() {
  barrier_stubs_init_inner("zgc_load_barrier_stubs", ON_STRONG_OOP_REF, _load_barrier_slow_stub);
  barrier_stubs_init_inner("zgc_load_barrier_weak_stubs", ON_WEAK_OOP_REF, _load_barrier_weak_slow_stub);
}
 475 
// Returns the strong load-barrier stub for 'reg' (NULL for rsp/r15,
// which have no stubs generated).
address ZBarrierSetAssembler::load_barrier_slow_stub(Register reg) {
  return _load_barrier_slow_stub[reg->encoding()];
}
 479 
// Returns the weak load-barrier stub for 'reg' (NULL for rsp/r15,
// which have no stubs generated).
address ZBarrierSetAssembler::load_barrier_weak_slow_stub(Register reg) {
  return _load_barrier_weak_slow_stub[reg->encoding()];
}