// Thread-Local Edens support

// static member initialization
size_t           ThreadLocalAllocBuffer::_max_size       = 0;
unsigned         ThreadLocalAllocBuffer::_target_refills = 0;
GlobalTLABStats* ThreadLocalAllocBuffer::_global_stats   = NULL;

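// Retires the current TLAB on the allocation slow path: whatever space is still
// free in the buffer is charged to the slow-refill waste counter before the
// buffer is discarded and refilled.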
void ThreadLocalAllocBuffer::clear_before_allocation() {
  _slow_refill_waste += (unsigned)remaining();
  make_parsable(true);   // also retire the TLAB
}

void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() {
  global_stats()->initialize();

  for (JavaThread *thread = Threads::first(); thread != NULL; thread = thread->next()) {
    thread->tlab().accumulate_statistics();
    thread->tlab().initialize_statistics();
  }

  Universe::heap()->accumulate_statistics_all_gclabs();

  // Publish new stats if some allocation occurred.
  if (global_stats()->allocation() != 0) {
    global_stats()->publish();
    if (PrintTLAB) {
      global_stats()->print();
    }
  }
}

void ThreadLocalAllocBuffer::accumulate_statistics() {
  Thread* thread = myThread();
  size_t capacity = Universe::heap()->tlab_capacity(thread);
  size_t used     = Universe::heap()->tlab_used(thread);

  _gc_waste += (unsigned)remaining();
  size_t total_allocated = _gclab ? thread->allocated_bytes_gclab() : thread->allocated_bytes();
  size_t allocated_since_last_gc = total_allocated - _allocated_before_last_gc;
  _allocated_before_last_gc = total_allocated;

  if (PrintTLAB && (_number_of_refills > 0 || Verbose)) {
    print_stats("gc");
  }

  if (_number_of_refills > 0) {
    // Update allocation history if a reasonable amount of eden was allocated.
    bool update_allocation_history = used > 0.5 * capacity;

    if (update_allocation_history) {
      // Average the fraction of eden allocated in a tlab by this
      // thread for use in the next resize operation.
      // _gc_waste is not subtracted because it's included in
      // "used".
      // The result can be larger than 1.0 due to direct to old allocations.
      // These allocations should ideally not be counted but since it is not possible
      // to filter them out here we just cap the fraction to be at most 1.0.
      // Keep alloc_frac as float and not double to avoid the double to float conversion
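      // (Illustration with hypothetical numbers: a thread that allocated 2 MB since
      // the last GC while eden shows 10 MB used contributes a sample of 0.2.)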
      float alloc_frac = MIN2(1.0f, allocated_since_last_gc / (float) used);
      _allocation_fraction.sample(alloc_frac);
    }
    global_stats()->update_allocating_threads();
    global_stats()->update_number_of_refills(_number_of_refills);
    global_stats()->update_allocation(_number_of_refills * desired_size());
    global_stats()->update_gc_waste(_gc_waste);
    global_stats()->update_slow_refill_waste(_slow_refill_waste);
    global_stats()->update_fast_refill_waste(_fast_refill_waste);

  } else {
    assert(_number_of_refills == 0 && _fast_refill_waste == 0 &&
           _slow_refill_waste == 0 && _gc_waste == 0,
           "tlab stats == 0");
  }
  global_stats()->update_slow_allocations(_slow_allocations);
}

// Fills the current tlab with a dummy filler array to create
// an illusion of a contiguous Eden and optionally retires the tlab.
// Waste accounting should be done in caller as appropriate; see,
// for example, clear_before_allocation().
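// Covering the unused tail with a filler object is what keeps the heap parsable:
// heap walkers can step over the dummy array instead of hitting raw, unformatted
// memory.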
void ThreadLocalAllocBuffer::make_parsable(bool retire) {
  if (end() != NULL) {
    invariants();

    if (retire) {
      if (_gclab) {
        myThread()->incr_allocated_bytes_gclab(used_bytes());
      } else {
        myThread()->incr_allocated_bytes(used_bytes());
      }
    }

    CollectedHeap::fill_with_object(top(), hard_end(), retire);

    if (retire || ZeroTLAB) {  // "Reset" the TLAB
      set_start(NULL);
      set_top(NULL);
      set_pf_top(NULL);
      set_end(NULL);
    }
  }
  assert(!(retire || ZeroTLAB) ||
         (start() == NULL && end() == NULL && top() == NULL),
         "TLAB must be reset");
}

void ThreadLocalAllocBuffer::resize_all_tlabs() {
  if (ResizeTLAB) {
    for (JavaThread *thread = Threads::first(); thread != NULL; thread = thread->next()) {
      thread->tlab().resize();
    }
  }
}

void ThreadLocalAllocBuffer::fill(HeapWord* start,
                                  HeapWord* top,
                                  size_t    new_size) {
  _number_of_refills++;
  if (PrintTLAB && Verbose) {
    print_stats("fill");
  }
  assert(top <= start + new_size - alignment_reserve(), "size too small");
  initialize(start, top, start + new_size - alignment_reserve());

  // Reset amount of internal fragmentation
  set_refill_waste_limit(initial_refill_waste_limit());
}

void ThreadLocalAllocBuffer::initialize(HeapWord* start,
                                        HeapWord* top,
                                        HeapWord* end) {
  set_start(start);
  set_top(top);
  set_pf_top(top);
  set_end(end);
  invariants();
}

void ThreadLocalAllocBuffer::initialize(bool gclab) {
  _initialized = true;
  _gclab = gclab;
  initialize(NULL,                    // start
             NULL,                    // top
             NULL);                   // end

  set_desired_size(initial_desired_size());

  // Following check is needed because at startup the main
  // thread is initialized before the heap is. The initialization for
  // this thread is redone in startup_initialization below.
  if (Universe::heap() != NULL) {
    size_t capacity = Universe::heap()->tlab_capacity(myThread()) / HeapWordSize;
    // Keep alloc_frac as float and not double to avoid the double to float conversion
    float alloc_frac = desired_size() * target_refills() / (float) capacity;
    _allocation_fraction.sample(alloc_frac);
  }

  set_refill_waste_limit(initial_refill_waste_limit());

  initialize_statistics();
}

void ThreadLocalAllocBuffer::startup_initialization() {

  // Assuming each thread's active tlab is, on average,
  // 1/2 full at a GC
  _target_refills = 100 / (2 * TLABWasteTargetPercent);
  _target_refills = MAX2(_target_refills, (unsigned)1U);
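  // For example, with the default TLABWasteTargetPercent of 1 this targets
  // 100 / (2 * 1) = 50 refills per thread between GCs: half a TLAB left unused
  // at GC out of ~50 TLABs' worth of allocation keeps the expected waste near 1%.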

  _global_stats = new GlobalTLABStats();

  // During jvm startup, the main thread is initialized
  // before the heap is initialized. So reinitialize it now.
  guarantee(Thread::current()->is_Java_thread(), "tlab initialization thread not Java thread");
  Thread::current()->tlab().initialize(false);
  if (UseShenandoahGC) {
    Thread::current()->gclab().initialize(true);
  }

  if (PrintTLAB && Verbose) {
    gclog_or_tty->print("TLAB min: " SIZE_FORMAT " initial: " SIZE_FORMAT " max: " SIZE_FORMAT "\n",
                        min_size(), Thread::current()->tlab().initial_desired_size(), max_size());
  }
}

size_t ThreadLocalAllocBuffer::initial_desired_size() {
  size_t init_sz = 0;

  if (TLABSize > 0) {
    init_sz = TLABSize / HeapWordSize;
  } else if (global_stats() != NULL) {
    // Initial size is a function of the average number of allocating threads.
    unsigned nof_threads = global_stats()->allocating_threads_avg();

    init_sz = (Universe::heap()->tlab_capacity(myThread()) / HeapWordSize) /
              (nof_threads * target_refills());
    init_sz = align_object_size(init_sz);
  }
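  // Illustration with hypothetical numbers: an eden capacity of 1,000,000 words
  // shared by an average of 20 allocating threads at 50 target refills each gives
  // 1,000,000 / (20 * 50) = 1,000 words, before alignment and clamping below.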
  init_sz = MIN2(MAX2(init_sz, min_size()), max_size());
  return init_sz;
}

void ThreadLocalAllocBuffer::print_stats(const char* tag) {
  Thread* thrd = myThread();
  size_t waste = _gc_waste + _slow_refill_waste + _fast_refill_waste;
  size_t alloc = _number_of_refills * _desired_size;
  double waste_percent = alloc == 0 ? 0.0 :
                         100.0 * waste / alloc;
  size_t tlab_used = Universe::heap()->tlab_used(thrd);
  gclog_or_tty->print("TLAB: %s %s thread: " INTPTR_FORMAT " [id: %2d]"
                      " desired_size: " SIZE_FORMAT "KB"
                      " slow allocs: %d refill waste: " SIZE_FORMAT "B"
                      " alloc:%8.5f %8.0fKB refills: %d waste %4.1f%% gc: %dB"
                      " slow: %dB fast: %dB\n",
                      tag, _gclab ? "gclab" : "tlab ", p2i(thrd), thrd->osthread()->thread_id(),
                      _desired_size / (K / HeapWordSize),
                      _slow_allocations, _refill_waste_limit * HeapWordSize,
                      _allocation_fraction.average(),
                      _allocation_fraction.average() * tlab_used / K,
                      _number_of_refills, waste_percent,
                      _gc_waste * HeapWordSize,
                      _slow_refill_waste * HeapWordSize,
                      _fast_refill_waste * HeapWordSize);
}

void ThreadLocalAllocBuffer::verify() {
  HeapWord* p = start();
  HeapWord* t = top();
  HeapWord* prev_p = NULL;
  while (p < t) {
    oop(p)->verify();
    prev_p = p;
    p += oop(p)->size();
  }
  guarantee(p == top(), "end of last object must match end of space");
}

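// The TLAB keeps no back-pointer to its owning thread. Because each TLAB (or
// GCLAB) is embedded directly in its Thread, the owner can be recovered by
// subtracting the buffer field's offset within Thread from this object's address.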
Thread* ThreadLocalAllocBuffer::myThread() {
  ByteSize gclab_offset = Thread::gclab_start_offset();
  ByteSize tlab_offset  = Thread::tlab_start_offset();
  ByteSize offs = _gclab ? gclab_offset : tlab_offset;
  Thread* thread = (Thread*)(((char *)this) +
                             in_bytes(start_offset()) - in_bytes(offs));
#ifdef ASSERT
  assert(this == (_gclab ? &thread->gclab() : &thread->tlab()), "must be");
#endif
  return thread;
}

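// Backs out the most recent allocation of `size` words by moving top back, e.g.
// when a racing evacuation in this (Shenandoah) fork means a freshly allocated
// copy is not needed after all.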
void ThreadLocalAllocBuffer::rollback(size_t size) {
  HeapWord* old_top = top();
  if (old_top != NULL) { // Pathological case: we accept that we can't rollback.
    set_top(old_top - size);
  }
}


GlobalTLABStats::GlobalTLABStats() :
  _allocating_threads_avg(TLABAllocationWeight) {

  initialize();

  _allocating_threads_avg.sample(1); // One allocating thread at startup

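  // With -XX:+UsePerfData (on by default) the aggregate TLAB statistics are also
  // exported as sun.gc.tlab.* jvmstat counters, readable by external tools, for
  // example via "jcmd <pid> PerfCounter.print".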
  if (UsePerfData) {

    EXCEPTION_MARK;
    ResourceMark rm;

    char* cname = PerfDataManager::counter_name("tlab", "allocThreads");
    _perf_allocating_threads =
      PerfDataManager::create_variable(SUN_GC, cname, PerfData::U_None, CHECK);

    cname = PerfDataManager::counter_name("tlab", "fills");