src/hotspot/share/opto/output.cpp

@@ -24,10 +24,11 @@
  
  #include "asm/assembler.inline.hpp"
  #include "code/compiledIC.hpp"
  #include "code/debugInfo.hpp"
  #include "code/debugInfoRec.hpp"
+ #include "code/SCCache.hpp"
  #include "compiler/compileBroker.hpp"
  #include "compiler/compilerDirectives.hpp"
  #include "compiler/disassembler.hpp"
  #include "compiler/oopMap.hpp"
  #include "gc/shared/barrierSet.hpp"

@@ -1367,11 +1368,11 @@
    // nmethod and CodeBuffer count stubs & constants as part of method's code.
    // class HandlerImpl is platform-specific and defined in the *.ad files.
    int exception_handler_req = HandlerImpl::size_exception_handler() + MAX_stubs_size; // add marginal slop for handler
    int deopt_handler_req     = HandlerImpl::size_deopt_handler()     + MAX_stubs_size; // add marginal slop for handler
    stub_req += MAX_stubs_size;   // ensure per-stub margin
-   code_req += MAX_inst_size;    // ensure per-instruction margin
+   code_req += max_inst_size();  // ensure per-instruction margin
  
    if (StressCodeBuffers)
      code_req = const_req = stub_req = exception_handler_req = deopt_handler_req = 0x10;  // force expansion
  
    int total_req =

@@ -1564,11 +1565,11 @@
            MachNode *nop = new MachNopNode(nops_cnt);
            block->insert_node(nop, j++);
            last_inst++;
            C->cfg()->map_node_to_block(nop, block);
            // Ensure enough space.
-           masm->code()->insts()->maybe_expand_to_ensure_remaining(MAX_inst_size);
+           masm->code()->insts()->maybe_expand_to_ensure_remaining(max_inst_size());
            if ((masm->code()->blob() == nullptr) || (!CompileBroker::should_compile_new_jobs())) {
              C->record_failure("CodeCache is full");
              return;
            }
            nop->emit(masm, C->regalloc());

@@ -1693,11 +1694,11 @@
            inct_starts[inct_cnt++] = current_offset;
          }
        }
  
        // Verify that there is sufficient space remaining
-       masm->code()->insts()->maybe_expand_to_ensure_remaining(MAX_inst_size);
+       masm->code()->insts()->maybe_expand_to_ensure_remaining(max_inst_size());
        if ((masm->code()->blob() == nullptr) || (!CompileBroker::should_compile_new_jobs())) {
          C->record_failure("CodeCache is full");
          return;
        }
  

@@ -3350,11 +3351,11 @@
    // may be shared by several calls to scratch_emit_size.
    // The allocation of the scratch buffer blob is particularly
    // expensive, since it has to grab the code cache lock.
    BufferBlob* blob = this->scratch_buffer_blob();
    assert(blob != nullptr, "Initialize BufferBlob at start");
-   assert(blob->size() > MAX_inst_size, "sanity");
+   assert(blob->size() > max_inst_size(), "sanity");
    relocInfo* locs_buf = scratch_locs_memory();
    address blob_begin = blob->content_begin();
    address blob_end   = (address)locs_buf;
    assert(blob->contains(blob_end), "sanity");
    CodeBuffer buf(blob_begin, blob_end - blob_begin);

@@ -3395,18 +3396,16 @@
  
    return buf.insts_size();
  }
  
  void PhaseOutput::install() {
-   if (!C->should_install_code()) {
-     return;
-   } else if (C->stub_function() != nullptr) {
+   if (C->should_install_code() && C->stub_function() != nullptr) {
      install_stub(C->stub_name());
    } else {
      install_code(C->method(),
                   C->entry_bci(),
-                  CompileBroker::compiler2(),
+                  CompilerThread::current()->compiler(),
                   C->has_unsafe_access(),
                   SharedRuntime::is_wide_vector(C->max_vector_size()));
    }
  }
  

@@ -3447,19 +3446,31 @@
                                       frame_size_in_words(),
                                       oop_map_set(),
                                       &_handler_table,
                                       inc_table(),
                                       compiler,
+                                      C->has_clinit_barriers(),
+                                      C->for_preload(),
                                       has_unsafe_access,
                                       SharedRuntime::is_wide_vector(C->max_vector_size()),
                                       C->has_monitors(),
                                       C->has_scoped_access(),
-                                      0);
+                                      0,
+                                      C->should_install_code());
  
      if (C->log() != nullptr) { // Print code cache state into compiler log
        C->log()->code_cache_state();
      }
+     if (C->has_clinit_barriers()) {
+       assert(C->for_preload(), "sanity");
+       // Build second version of code without class initialization barriers
+       if (C->env()->task()->compile_reason() == CompileTask::Reason_PrecompileForPreload) {
+         // don't automatically precompile a barrier-free version unless explicitly asked
+       } else {
+         C->record_failure(C2Compiler::retry_no_clinit_barriers());
+       }
+     }
    }
  }
  void PhaseOutput::install_stub(const char* stub_name) {
    // Entry point will be accessed using stub_entry_point();
    if (code_buffer() == nullptr) {

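Note on the clinit-barrier change in the hunk above: when a method has been compiled with class-initialization barriers for the preload cache, install_code() requests a second, barrier-free compilation by recording a retry-style failure (C2Compiler::retry_no_clinit_barriers()), unless the task was itself an explicit precompile-for-preload request. A hedged sketch of how such a retry reason is typically consumed, following the existing retry_no_* pattern in C2Compiler::compile_method(); the do_clinit_barriers flag name is an assumption and is not part of this diff:

    // Inside the retry loop of C2Compiler::compile_method() (sketch only):
    if (C.failure_reason_is(retry_no_clinit_barriers())) {
      assert(do_clinit_barriers, "must make progress");
      do_clinit_barriers = false;         // recompile without class-init barriers
      env->report_failure(C.failure_reason());
      continue;                           // retry the compilation
    }
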
@@ -3654,5 +3665,19 @@
  #ifndef PRODUCT
  void PhaseOutput::print_statistics() {
    Scheduling::print_statistics();
  }
  #endif
+ 
+ int PhaseOutput::max_inst_size() {
+   if (SCCache::is_on_for_write()) {
+     // See the comment in output.hpp.
+     return 16384;
+   } else {
+     return mainline_MAX_inst_size;
+   }
+ }
+ 
+ int PhaseOutput::max_inst_gcstub_size() {
+   assert(mainline_MAX_inst_size <= max_inst_size(), "Sanity");
+   return mainline_MAX_inst_size;
+ }
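 
  The new helpers refer to mainline_MAX_inst_size and to a comment in output.hpp that is not shown on this page. As a rough sketch, assuming the mainline MAX_inst_size value of 2048 and inferring the declarations from the calls above (this is not the actual header diff):

    // In class PhaseOutput (sketch; names taken from the calls in this patch):
    enum { mainline_MAX_inst_size = 2048 };  // former MAX_inst_size per-instruction budget
    static int max_inst_size();              // larger budget (16K) while the SCCache is written
    static int max_inst_gcstub_size();       // GC stubs keep the mainline budget

  The include of code/SCCache.hpp added at the top of the file is what makes SCCache::is_on_for_write() visible to max_inst_size().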