src/hotspot/share/gc/parallel/mutableSpace.cpp

@@ -103,45 +103,46 @@
         const size_t change_size = head_size + tail_size;
         const float setup_rate_words = NUMASpaceResizeRate >> LogBytesPerWord;
         head_size = MIN2((size_t)(setup_rate_words * head_size / change_size),
                          head_size);
         tail_size = MIN2((size_t)(setup_rate_words * tail_size / change_size),
                          tail_size);
       }
       head = MemRegion(intersection.start() - head_size, intersection.start());
       tail = MemRegion(intersection.end(), intersection.end() + tail_size);
     }
     assert(mr.contains(head) && mr.contains(tail), "Sanity");
 
     size_t page_size = alignment();
 
     if (UseNUMA) {
       numa_setup_pages(head, page_size, clear_space);
       numa_setup_pages(tail, page_size, clear_space);
     }
 
     if (AlwaysPreTouch) {
+      size_t pretouch_page_size = UseLargePages ? page_size : os::vm_page_size();
       PretouchTask::pretouch("ParallelGC PreTouch head", (char*)head.start(), (char*)head.end(),
-                             page_size, pretouch_workers);
+                             pretouch_page_size, pretouch_workers);
 
       PretouchTask::pretouch("ParallelGC PreTouch tail", (char*)tail.start(), (char*)tail.end(),
-                             page_size, pretouch_workers);
+                             pretouch_page_size, pretouch_workers);
     }
 
     // Remember where we stopped so that we can continue later.
     set_last_setup_region(MemRegion(head.start(), tail.end()));
   }
 
   set_bottom(mr.start());
   // When expanding concurrently with callers of cas_allocate, setting end
   // makes the new space available for allocation by other threads.  So this
   // assignment must follow all other configuration and initialization that
   // might be done for expansion.
   Atomic::release_store(end_addr(), mr.end());
 
   if (clear_space) {
     clear(mangle_space);
   }
 }
 
 void MutableSpace::clear(bool mangle_space) {
   set_top(bottom());
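
The functional change in this hunk is the pretouch stride: previously PretouchTask::pretouch walked the head and tail regions in steps of alignment(), which is not necessarily the page size the OS actually uses to back the mapping; the patch falls back to os::vm_page_size() whenever large pages are not in use. Below is a minimal sketch of why the stride matters, with pretouch_sketch as a hypothetical stand-in for the real PretouchTask::pretouch (which additionally splits the work across pretouch_workers):

#include <cstddef>

// Hypothetical stand-in for PretouchTask::pretouch: touch one byte per
// stride so the OS faults in physical backing for [start, end) up front.
// If stride is larger than the real backing page, the pages in between
// are never touched and the pretouch is mostly a no-op.
static void pretouch_sketch(char* start, char* end, size_t stride) {
  for (char* p = start; p < end; p += stride) {
    // Volatile access keeps the compiler from eliding the write; the
    // range is assumed freshly committed, so storing zero is harmless.
    *(volatile char*)p = 0;
  }
}

Single-threaded and byte-granular, but it shows the invariant the patch restores: the stride must not exceed the granularity at which the kernel commits memory.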
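
The comment kept just above Atomic::release_store(end_addr(), mr.end()) is the ordering contract with the lock-free allocation path: end must be published last, with release semantics, so that any thread whose acquire load of end observes the enlarged space also observes all the setup performed before the store. A minimal sketch of both sides in portable C++ (a hypothetical simplification, not HotSpot's MutableSpace):

#include <atomic>
#include <cstddef>

// Hypothetical bump-pointer space illustrating the pairing: expansion
// publishes the new end with a release store; allocation reads it with
// an acquire load, so everything initialized before the store is
// visible to any thread that sees the enlarged space.
struct SpaceSketch {
  std::atomic<char*> _top;
  std::atomic<char*> _end;

  void expand_to(char* new_end) {
    // ... commit, pretouch, NUMA setup of the new range ...
    _end.store(new_end, std::memory_order_release);  // must come last
  }

  char* cas_allocate(size_t size) {
    for (;;) {
      char* end = _end.load(std::memory_order_acquire);
      char* obj = _top.load(std::memory_order_relaxed);
      if (end - obj < (ptrdiff_t)size) {
        return nullptr;  // full; a caller may expand and retry
      }
      // Claim [obj, obj + size) unless another thread raced us.
      if (_top.compare_exchange_weak(obj, obj + size)) {
        return obj;
      }
    }
  }
};

With the store and load paired this way, cas_allocate needs no lock: a thread either sees the old end and fails over to expansion, or it sees the new end together with the fully initialized region behind it.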