
src/share/vm/memory/threadLocalAllocBuffer.cpp

--- old/src/share/vm/memory/threadLocalAllocBuffer.cpp

  36 // Thread-Local Edens support
  37 
  38 // static member initialization
  39 size_t           ThreadLocalAllocBuffer::_max_size       = 0;
  40 unsigned         ThreadLocalAllocBuffer::_target_refills = 0;
  41 GlobalTLABStats* ThreadLocalAllocBuffer::_global_stats   = NULL;
  42 
  43 void ThreadLocalAllocBuffer::clear_before_allocation() {
  44   _slow_refill_waste += (unsigned)remaining();
  45   make_parsable(true);   // also retire the TLAB
  46 }
  47 
  48 void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() {
  49   global_stats()->initialize();
  50 
  51   for (JavaThread *thread = Threads::first(); thread != NULL; thread = thread->next()) {
  52     thread->tlab().accumulate_statistics();
  53     thread->tlab().initialize_statistics();
  54   }
  55 


  56   // Publish new stats if some allocation occurred.
  57   if (global_stats()->allocation() != 0) {
  58     global_stats()->publish();
  59     if (PrintTLAB) {
  60       global_stats()->print();
  61     }
  62   }
  63 }
  64 
  65 void ThreadLocalAllocBuffer::accumulate_statistics() {
  66   Thread* thread = myThread();
  67   size_t capacity = Universe::heap()->tlab_capacity(thread);
  68   size_t used     = Universe::heap()->tlab_used(thread);
  69 
  70   _gc_waste += (unsigned)remaining();
  71   size_t total_allocated = thread->allocated_bytes();
  72   size_t allocated_since_last_gc = total_allocated - _allocated_before_last_gc;
  73   _allocated_before_last_gc = total_allocated;
  74 
  75   if (PrintTLAB && (_number_of_refills > 0 || Verbose)) {
  76     print_stats("gc");
  77   }
  78 
  79   if (_number_of_refills > 0) {
  80     // Update allocation history if a reasonable amount of eden was allocated.
  81     bool update_allocation_history = used > 0.5 * capacity;
  82 
  83     if (update_allocation_history) {
  84       // Average the fraction of eden allocated in a tlab by this
  85       // thread for use in the next resize operation.
  86       // _gc_waste is not subtracted because it's included in
  87       // "used".
  88       // The result can be larger than 1.0 due to direct to old allocations.
  89       // These allocations should ideally not be counted but since it is not possible
  90       // to filter them out here we just cap the fraction to be at most 1.0.
  91       double alloc_frac = MIN2(1.0, (double) allocated_since_last_gc / used);


  98     global_stats()->update_slow_refill_waste(_slow_refill_waste);
  99     global_stats()->update_fast_refill_waste(_fast_refill_waste);
 100 
 101   } else {
 102     assert(_number_of_refills == 0 && _fast_refill_waste == 0 &&
 103            _slow_refill_waste == 0 && _gc_waste          == 0,
 104            "tlab stats == 0");
 105   }
 106   global_stats()->update_slow_allocations(_slow_allocations);
 107 }
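The comment above describes averaging the capped fraction into the thread's allocation-fraction estimate for the next resize. A minimal standalone sketch of that arithmetic, assuming a simple percentage-weighted decaying average as a stand-in for the VM's adaptive weighted average (the averaging class actually used is not shown in this listing):

#include <algorithm>
#include <cstddef>

// Simplified stand-in for an exponentially decaying average: each new sample
// contributes `weight` percent, older history keeps the remaining percent.
class DecayingAverage {
  double   _avg;
  unsigned _weight;
public:
  explicit DecayingAverage(unsigned weight) : _avg(0.0), _weight(weight) {}
  void   sample(double v) { _avg = (_weight * v + (100 - _weight) * _avg) / 100.0; }
  double average() const  { return _avg; }
};

// Fold one GC cycle's allocation into the estimate (assumes used > 0, which the
// caller guarantees by only updating history when used > 0.5 * capacity).
void sample_alloc_fraction(DecayingAverage& allocation_fraction,
                           size_t allocated_since_last_gc,   // bytes this thread allocated
                           size_t used) {                    // bytes of eden used via TLABs
  // Cap at 1.0: allocations that bypassed the TLAB (e.g. direct-to-old) can push
  // the raw ratio above 1.0 and should not inflate the estimate.
  double alloc_frac = std::min(1.0, (double) allocated_since_last_gc / used);
  allocation_fraction.sample(alloc_frac);
}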
 108 
 109 // Fills the current tlab with a dummy filler array to create
 110 // an illusion of a contiguous Eden and optionally retires the tlab.
 111 // Waste accounting should be done in caller as appropriate; see,
 112 // for example, clear_before_allocation().
 113 void ThreadLocalAllocBuffer::make_parsable(bool retire) {
 114   if (end() != NULL) {
 115     invariants();
 116 
 117     if (retire) {
 118       myThread()->incr_allocated_bytes(used_bytes());
 119     }
 120 
 121     CollectedHeap::fill_with_object(top(), hard_end(), retire);
 122 
 123     if (retire || ZeroTLAB) {  // "Reset" the TLAB
 124       set_start(NULL);
 125       set_top(NULL);
 126       set_pf_top(NULL);
 127       set_end(NULL);
 128     }
 129   }
 130   assert(!(retire || ZeroTLAB)  ||
 131          (start() == NULL && end() == NULL && top() == NULL),
 132          "TLAB must be reset");
 133 }
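To illustrate why the filler matters, here is a toy model (not HotSpot's real object layout) of a word-addressed region where each object's first word holds its size in words. A linear walk lands exactly on the region's end only if every unused gap has been formatted as a dummy object, which is what make_parsable() arranges for the [top, hard_end) tail of the TLAB:

#include <cstdint>

typedef uintptr_t WordSketch;   // stand-in for a heap word in this toy model

// Format the unused gap [top, hard_end) as one dummy "object" covering it all.
inline void fill_gap(WordSketch* top, WordSketch* hard_end) {
  *top = (WordSketch)(hard_end - top);     // size of the dummy object, in words
}

// Walk the region object by object; terminates exactly at `end` only when every
// gap was filled, i.e. the region is parsable.
inline WordSketch* walk(WordSketch* p, WordSketch* end) {
  while (p < end) {
    p += *p;                               // advance by the object's size in words
  }
  return p;
}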
 134 
 135 void ThreadLocalAllocBuffer::resize_all_tlabs() {
 136   if (ResizeTLAB) {
 137     for (JavaThread *thread = Threads::first(); thread != NULL; thread = thread->next()) {
 138       thread->tlab().resize();


 176   if (PrintTLAB && Verbose) {
 177     print_stats("fill");
 178   }
 179   assert(top <= start + new_size - alignment_reserve(), "size too small");
 180   initialize(start, top, start + new_size - alignment_reserve());
 181 
 182   // Reset amount of internal fragmentation
 183   set_refill_waste_limit(initial_refill_waste_limit());
 184 }
 185 
 186 void ThreadLocalAllocBuffer::initialize(HeapWord* start,
 187                                         HeapWord* top,
 188                                         HeapWord* end) {
 189   set_start(start);
 190   set_top(top);
 191   set_pf_top(top);
 192   set_end(end);
 193   invariants();
 194 }
 195 
 196 void ThreadLocalAllocBuffer::initialize() {


 197   initialize(NULL,                    // start
 198              NULL,                    // top
 199              NULL);                   // end
 200 
 201   set_desired_size(initial_desired_size());
 202 
 203   // Following check is needed because at startup the main
 204   // thread is initialized before the heap is.  The initialization for
 205   // this thread is redone in startup_initialization below.
 206   if (Universe::heap() != NULL) {
 207     size_t capacity   = Universe::heap()->tlab_capacity(myThread()) / HeapWordSize;
 208     double alloc_frac = desired_size() * target_refills() / (double) capacity;
 209     _allocation_fraction.sample(alloc_frac);
 210   }
 211 
 212   set_refill_waste_limit(initial_refill_waste_limit());
 213 
 214   initialize_statistics();
 215 }
 216 
 217 void ThreadLocalAllocBuffer::startup_initialization() {
 218 
 219   // Assuming each thread's active tlab is, on average,
 220   // 1/2 full at a GC
 221   _target_refills = 100 / (2 * TLABWasteTargetPercent);
 222   _target_refills = MAX2(_target_refills, (unsigned)1U);
 223 
 224   _global_stats = new GlobalTLABStats();
 225 
 226   // During jvm startup, the main thread is initialized
 227   // before the heap is initialized.  So reinitialize it now.
 228   guarantee(Thread::current()->is_Java_thread(), "tlab initialization thread not Java thread");
 229   Thread::current()->tlab().initialize();
 230 
 231   if (PrintTLAB && Verbose) {
 232     gclog_or_tty->print("TLAB min: " SIZE_FORMAT " initial: " SIZE_FORMAT " max: " SIZE_FORMAT "\n",
 233                         min_size(), Thread::current()->tlab().initial_desired_size(), max_size());
 234   }
 235 }
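A worked example of the refill target above, assuming the default -XX:TLABWasteTargetPercent=1: if each TLAB left half-empty at GC may waste at most about 1% of the thread's eden share, the thread needs roughly 100 / (2 * 1) = 50 refills per GC cycle. A standalone sketch of the same arithmetic:

#include <algorithm>

// Refills per GC cycle implied by the waste target, never fewer than one.
unsigned target_refills_for(unsigned tlab_waste_target_percent) {
  unsigned refills = 100 / (2 * tlab_waste_target_percent);
  return std::max(refills, 1u);
}
// target_refills_for(1)  == 50   (default TLABWasteTargetPercent)
// target_refills_for(10) == 5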
 236 
 237 size_t ThreadLocalAllocBuffer::initial_desired_size() {
 238   size_t init_sz = 0;
 239 
 240   if (TLABSize > 0) {
 241     init_sz = TLABSize / HeapWordSize;
 242   } else if (global_stats() != NULL) {
 243     // Initial size is a function of the average number of allocating threads.
 244     unsigned nof_threads = global_stats()->allocating_threads_avg();
 245 
 246     init_sz  = (Universe::heap()->tlab_capacity(myThread()) / HeapWordSize) /
 247                       (nof_threads * target_refills());
 248     init_sz = align_object_size(init_sz);
 249   }
 250   init_sz = MIN2(MAX2(init_sz, min_size()), max_size());
 251   return init_sz;
 252 }
 253 
 254 void ThreadLocalAllocBuffer::print_stats(const char* tag) {
 255   Thread* thrd = myThread();
 256   size_t waste = _gc_waste + _slow_refill_waste + _fast_refill_waste;
 257   size_t alloc = _number_of_refills * _desired_size;
 258   double waste_percent = alloc == 0 ? 0.0 :
 259                       100.0 * waste / alloc;
 260   size_t tlab_used  = Universe::heap()->tlab_used(thrd);
 261   gclog_or_tty->print("TLAB: %s thread: " INTPTR_FORMAT " [id: %2d]"
 262                       " desired_size: " SIZE_FORMAT "KB"
 263                       " slow allocs: %d  refill waste: " SIZE_FORMAT "B"
 264                       " alloc:%8.5f %8.0fKB refills: %d waste %4.1f%% gc: %dB"
 265                       " slow: %dB fast: %dB\n",
 266                       tag, thrd, thrd->osthread()->thread_id(),
 267                       _desired_size / (K / HeapWordSize),
 268                       _slow_allocations, _refill_waste_limit * HeapWordSize,
 269                       _allocation_fraction.average(),
 270                       _allocation_fraction.average() * tlab_used / K,
 271                       _number_of_refills, waste_percent,
 272                       _gc_waste * HeapWordSize,
 273                       _slow_refill_waste * HeapWordSize,
 274                       _fast_refill_waste * HeapWordSize);
 275 }
 276 
 277 void ThreadLocalAllocBuffer::verify() {
 278   HeapWord* p = start();
 279   HeapWord* t = top();
 280   HeapWord* prev_p = NULL;
 281   while (p < t) {
 282     oop(p)->verify();
 283     prev_p = p;
 284     p += oop(p)->size();
 285   }
 286   guarantee(p == top(), "end of last object must match end of space");
 287 }
 288 
 289 Thread* ThreadLocalAllocBuffer::myThread() {
 290   return (Thread*)(((char *)this) +
 291                    in_bytes(start_offset()) -
 292                    in_bytes(Thread::tlab_start_offset()));
 293 }
 294 
 295 
 296 GlobalTLABStats::GlobalTLABStats() :
 297   _allocating_threads_avg(TLABAllocationWeight) {
 298 
 299   initialize();
 300 
 301   _allocating_threads_avg.sample(1); // One allocating thread at startup
 302 
 303   if (UsePerfData) {
 304 
 305     EXCEPTION_MARK;
 306     ResourceMark rm;
 307 
 308     char* cname = PerfDataManager::counter_name("tlab", "allocThreads");
 309     _perf_allocating_threads =
 310       PerfDataManager::create_variable(SUN_GC, cname, PerfData::U_None, CHECK);
 311 
 312     cname = PerfDataManager::counter_name("tlab", "fills");




+++ new/src/share/vm/memory/threadLocalAllocBuffer.cpp

  36 // Thread-Local Edens support
  37 
  38 // static member initialization
  39 size_t           ThreadLocalAllocBuffer::_max_size       = 0;
  40 unsigned         ThreadLocalAllocBuffer::_target_refills = 0;
  41 GlobalTLABStats* ThreadLocalAllocBuffer::_global_stats   = NULL;
  42 
  43 void ThreadLocalAllocBuffer::clear_before_allocation() {
  44   _slow_refill_waste += (unsigned)remaining();
  45   make_parsable(true);   // also retire the TLAB
  46 }
  47 
  48 void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() {
  49   global_stats()->initialize();
  50 
  51   for (JavaThread *thread = Threads::first(); thread != NULL; thread = thread->next()) {
  52     thread->tlab().accumulate_statistics();
  53     thread->tlab().initialize_statistics();
  54   }
  55 
  56   Universe::heap()->accumulate_statistics_all_gclabs();
  57 
  58   // Publish new stats if some allocation occurred.
  59   if (global_stats()->allocation() != 0) {
  60     global_stats()->publish();
  61     if (PrintTLAB) {
  62       global_stats()->print();
  63     }
  64   }
  65 }
  66 
  67 void ThreadLocalAllocBuffer::accumulate_statistics() {
  68   Thread* thread = myThread();
  69   size_t capacity = Universe::heap()->tlab_capacity(thread);
  70   size_t used     = Universe::heap()->tlab_used(thread);
  71 
  72   _gc_waste += (unsigned)remaining();
  73   size_t total_allocated = _gclab ? thread->allocated_bytes_gclab() : thread->allocated_bytes();
  74   size_t allocated_since_last_gc = total_allocated - _allocated_before_last_gc;
  75   _allocated_before_last_gc = total_allocated;
  76 
  77   if (PrintTLAB && (_number_of_refills > 0 || Verbose)) {
  78     print_stats("gc");
  79   }
  80 
  81   if (_number_of_refills > 0) {
  82     // Update allocation history if a reasonable amount of eden was allocated.
  83     bool update_allocation_history = used > 0.5 * capacity;
  84 
  85     if (update_allocation_history) {
  86       // Average the fraction of eden allocated in a tlab by this
  87       // thread for use in the next resize operation.
  88       // _gc_waste is not subtracted because it's included in
  89       // "used".
  90       // The result can be larger than 1.0 due to direct to old allocations.
  91       // These allocations should ideally not be counted but since it is not possible
  92       // to filter them out here we just cap the fraction to be at most 1.0.
  93       double alloc_frac = MIN2(1.0, (double) allocated_since_last_gc / used);


 100     global_stats()->update_slow_refill_waste(_slow_refill_waste);
 101     global_stats()->update_fast_refill_waste(_fast_refill_waste);
 102 
 103   } else {
 104     assert(_number_of_refills == 0 && _fast_refill_waste == 0 &&
 105            _slow_refill_waste == 0 && _gc_waste          == 0,
 106            "tlab stats == 0");
 107   }
 108   global_stats()->update_slow_allocations(_slow_allocations);
 109 }
 110 
 111 // Fills the current tlab with a dummy filler array to create
 112 // an illusion of a contiguous Eden and optionally retires the tlab.
 113 // Waste accounting should be done in caller as appropriate; see,
 114 // for example, clear_before_allocation().
 115 void ThreadLocalAllocBuffer::make_parsable(bool retire) {
 116   if (end() != NULL) {
 117     invariants();
 118 
 119     if (retire) {
 120       if (_gclab) {
 121         myThread()->incr_allocated_bytes_gclab(used_bytes());
 122       } else {
 123         myThread()->incr_allocated_bytes(used_bytes());
 124       }
 125     }
 126 
 127     CollectedHeap::fill_with_object(top(), hard_end(), retire);
 128 
 129     if (retire || ZeroTLAB) {  // "Reset" the TLAB
 130       set_start(NULL);
 131       set_top(NULL);
 132       set_pf_top(NULL);
 133       set_end(NULL);
 134     }
 135   }
 136   assert(!(retire || ZeroTLAB)  ||
 137          (start() == NULL && end() == NULL && top() == NULL),
 138          "TLAB must be reset");
 139 }
 140 
 141 void ThreadLocalAllocBuffer::resize_all_tlabs() {
 142   if (ResizeTLAB) {
 143     for (JavaThread *thread = Threads::first(); thread != NULL; thread = thread->next()) {
 144       thread->tlab().resize();


 182   if (PrintTLAB && Verbose) {
 183     print_stats("fill");
 184   }
 185   assert(top <= start + new_size - alignment_reserve(), "size too small");
 186   initialize(start, top, start + new_size - alignment_reserve());
 187 
 188   // Reset amount of internal fragmentation
 189   set_refill_waste_limit(initial_refill_waste_limit());
 190 }
 191 
 192 void ThreadLocalAllocBuffer::initialize(HeapWord* start,
 193                                         HeapWord* top,
 194                                         HeapWord* end) {
 195   set_start(start);
 196   set_top(top);
 197   set_pf_top(top);
 198   set_end(end);
 199   invariants();
 200 }
 201 
 202 void ThreadLocalAllocBuffer::initialize(bool gclab) {
 203   _initialized = true;
 204   _gclab = gclab;
 205   initialize(NULL,                    // start
 206              NULL,                    // top
 207              NULL);                   // end
 208 
 209   set_desired_size(initial_desired_size());
 210 
 211   // Following check is needed because at startup the main
 212   // thread is initialized before the heap is.  The initialization for
 213   // this thread is redone in startup_initialization below.
 214   if (Universe::heap() != NULL) {
 215     size_t capacity   = Universe::heap()->tlab_capacity(myThread()) / HeapWordSize;
 216     double alloc_frac = desired_size() * target_refills() / (double) capacity;
 217     _allocation_fraction.sample(alloc_frac);
 218   }
 219 
 220   set_refill_waste_limit(initial_refill_waste_limit());
 221 
 222   initialize_statistics();
 223 }
 224 
 225 void ThreadLocalAllocBuffer::startup_initialization() {
 226 
 227   // Assuming each thread's active tlab is, on average,
 228   // 1/2 full at a GC
 229   _target_refills = 100 / (2 * TLABWasteTargetPercent);
 230   _target_refills = MAX2(_target_refills, (unsigned)1U);
 231 
 232   _global_stats = new GlobalTLABStats();
 233 
 234   // During jvm startup, the main thread is initialized
 235   // before the heap is initialized.  So reinitialize it now.
 236   guarantee(Thread::current()->is_Java_thread(), "tlab initialization thread not Java thread");
 237   Thread::current()->tlab().initialize(false);
 238   if (UseShenandoahGC) {
 239     Thread::current()->gclab().initialize(true);
 240   }
 241 
 242   if (PrintTLAB && Verbose) {
 243     gclog_or_tty->print("TLAB min: " SIZE_FORMAT " initial: " SIZE_FORMAT " max: " SIZE_FORMAT "\n",
 244                         min_size(), Thread::current()->tlab().initial_desired_size(), max_size());
 245   }
 246 }
 247 
 248 size_t ThreadLocalAllocBuffer::initial_desired_size() {
 249   size_t init_sz = 0;
 250 
 251   if (TLABSize > 0) {
 252     init_sz = TLABSize / HeapWordSize;
 253   } else if (global_stats() != NULL) {
 254     // Initial size is a function of the average number of allocating threads.
 255     unsigned nof_threads = global_stats()->allocating_threads_avg();
 256 
 257     init_sz  = (Universe::heap()->tlab_capacity(myThread()) / HeapWordSize) /
 258                       (nof_threads * target_refills());
 259     init_sz = align_object_size(init_sz);
 260   }
 261   init_sz = MIN2(MAX2(init_sz, min_size()), max_size());
 262   return init_sz;
 263 }
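A worked example of the sizing above (the numbers are illustrative, not defaults): with 512 MB of TLAB capacity, an average of 8 allocating threads and 50 target refills, each thread's initial TLAB is about 512 MB / (8 * 50), roughly 1.3 MB, clamped between min_size() and max_size(). A byte-based sketch of the same formula (HotSpot works in heap words and also object-aligns the result):

#include <algorithm>
#include <cstddef>

size_t initial_tlab_size_bytes(size_t tlab_capacity_bytes,
                               unsigned allocating_threads_avg,
                               unsigned target_refills,
                               size_t min_bytes, size_t max_bytes) {
  size_t sz = tlab_capacity_bytes / (allocating_threads_avg * target_refills);
  return std::min(std::max(sz, min_bytes), max_bytes);
}
// initial_tlab_size_bytes(512u * 1024 * 1024, 8, 50, 2 * 1024, 4u * 1024 * 1024)
//   == 1342177 bytes, i.e. roughly 1.3 MB before object alignment.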
 264 
 265 void ThreadLocalAllocBuffer::print_stats(const char* tag) {
 266   Thread* thrd = myThread();
 267   size_t waste = _gc_waste + _slow_refill_waste + _fast_refill_waste;
 268   size_t alloc = _number_of_refills * _desired_size;
 269   double waste_percent = alloc == 0 ? 0.0 :
 270                       100.0 * waste / alloc;
 271   size_t tlab_used  = Universe::heap()->tlab_used(thrd);
 272   gclog_or_tty->print("TLAB: %s %s thread: " INTPTR_FORMAT " [id: %2d]"
 273                       " desired_size: " SIZE_FORMAT "KB"
 274                       " slow allocs: %d  refill waste: " SIZE_FORMAT "B"
 275                       " alloc:%8.5f %8.0fKB refills: %d waste %4.1f%% gc: %dB"
 276                       " slow: %dB fast: %dB\n",
 277                       tag, _gclab ? "gclab" : "tlab ", p2i(thrd), thrd->osthread()->thread_id(),
 278                       _desired_size / (K / HeapWordSize),
 279                       _slow_allocations, _refill_waste_limit * HeapWordSize,
 280                       _allocation_fraction.average(),
 281                       _allocation_fraction.average() * tlab_used / K,
 282                       _number_of_refills, waste_percent,
 283                       _gc_waste * HeapWordSize,
 284                       _slow_refill_waste * HeapWordSize,
 285                       _fast_refill_waste * HeapWordSize);
 286 }
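A note on units in the desired_size line above: _desired_size is kept in heap words, and dividing by (K / HeapWordSize) converts words to kilobytes. On a 64-bit VM with 8-byte heap words, for example, a desired size of 65536 words prints as 65536 / (1024 / 8) = 512 KB.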
 287 
 288 void ThreadLocalAllocBuffer::verify() {
 289   HeapWord* p = start();
 290   HeapWord* t = top();
 291   HeapWord* prev_p = NULL;
 292   while (p < t) {
 293     oop(p)->verify();
 294     prev_p = p;
 295     p += oop(p)->size();
 296   }
 297   guarantee(p == top(), "end of last object must match end of space");
 298 }
 299 
 300 Thread* ThreadLocalAllocBuffer::myThread() {
 301   ByteSize gclab_offset = Thread::gclab_start_offset();
 302   ByteSize tlab_offset = Thread::tlab_start_offset();
 303   ByteSize offs = _gclab ? gclab_offset : tlab_offset;
 304   Thread* thread = (Thread*)(((char *)this) +
 305                    in_bytes(start_offset()) - in_bytes(offs));
 306 #ifdef ASSERT
 307   assert(this == (_gclab ? &thread->gclab() : &thread->tlab()), "must be");
 308 #endif
 309   return thread;
 310 }
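myThread() works because the TLAB (and, in this version, the GCLAB) is embedded by value inside Thread, so subtracting the member's byte offset from the buffer's own address recovers the owning thread. A minimal standalone sketch of the same trick, with illustrative names that are not HotSpot's:

#include <cstddef>

struct BufferSketch { char* _start; };

struct OwnerSketch {
  long         _other_state;
  BufferSketch _tlab;    // embedded by value, like Thread's TLAB
  BufferSketch _gclab;   // second buffer, selected by a role flag
};

// Recover the owner from a pointer to one of its embedded buffers.
inline OwnerSketch* owner_of(BufferSketch* buf, bool is_gclab) {
  size_t off = is_gclab ? offsetof(OwnerSketch, _gclab)
                        : offsetof(OwnerSketch, _tlab);
  return (OwnerSketch*)((char*)buf - off);
}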
 311 
 312 void ThreadLocalAllocBuffer::rollback(size_t size) {
 313   HeapWord* old_top = top();
 314   if (old_top != NULL) { // Pathological case: we accept that we can't rollback.
 315     set_top(old_top - size);
 316   }
 317 }
 318 
 319 
 320 GlobalTLABStats::GlobalTLABStats() :
 321   _allocating_threads_avg(TLABAllocationWeight) {
 322 
 323   initialize();
 324 
 325   _allocating_threads_avg.sample(1); // One allocating thread at startup
 326 
 327   if (UsePerfData) {
 328 
 329     EXCEPTION_MARK;
 330     ResourceMark rm;
 331 
 332     char* cname = PerfDataManager::counter_name("tlab", "allocThreads");
 333     _perf_allocating_threads =
 334       PerfDataManager::create_variable(SUN_GC, cname, PerfData::U_None, CHECK);
 335 
 336     cname = PerfDataManager::counter_name("tlab", "fills");

