8242602: Shenandoah: allow earlier recycle of trashed regions during concurrent root processing

Reviewed-by: shade
This commit is contained in:
Zhengyu Gu 2020-04-14 20:03:29 -04:00
parent 0278846eaa
commit 346d09e7aa
7 changed files with 26 additions and 8 deletions

View File

@@ -193,7 +193,7 @@ oop ShenandoahBarrierSet::load_reference_barrier_native_impl(oop obj, T* load_ad
}
ShenandoahMarkingContext* const marking_context = _heap->marking_context();
if (_heap->is_concurrent_root_in_progress() && !marking_context->is_marked(obj)) {
if (_heap->is_concurrent_weak_root_in_progress() && !marking_context->is_marked(obj)) {
Thread* thr = Thread::current();
if (thr->is_Java_thread()) {
return NULL;

View File

@@ -149,7 +149,7 @@ HeapWord* ShenandoahFreeSet::allocate_single(ShenandoahAllocRequest& req, bool&
HeapWord* ShenandoahFreeSet::try_allocate_in(ShenandoahHeapRegion* r, ShenandoahAllocRequest& req, bool& in_new_region) {
assert (!has_no_alloc_capacity(r), "Performance: should avoid full regions on this path: " SIZE_FORMAT, r->index());
if (_heap->is_concurrent_root_in_progress() &&
if (_heap->is_concurrent_weak_root_in_progress() &&
r->is_trash()) {
return NULL;
}
@@ -337,7 +337,7 @@ HeapWord* ShenandoahFreeSet::allocate_contiguous(ShenandoahAllocRequest& req) {
}
bool ShenandoahFreeSet::can_allocate_from(ShenandoahHeapRegion *r) {
return r->is_empty() || (r->is_trash() && !_heap->is_concurrent_root_in_progress());
return r->is_empty() || (r->is_trash() && !_heap->is_concurrent_weak_root_in_progress());
}
size_t ShenandoahFreeSet::alloc_capacity(ShenandoahHeapRegion *r) {

View File

@@ -1233,7 +1233,7 @@ private:
T o = RawAccess<>::oop_load(p);
if (!CompressedOops::is_null(o)) {
oop obj = CompressedOops::decode_not_null(o);
if (_heap->is_concurrent_root_in_progress() && !_marking_context->is_marked(obj)) {
if (_heap->is_concurrent_weak_root_in_progress() && !_marking_context->is_marked(obj)) {
// There may be dead oops in weak roots in concurrent root phase, do not touch them.
return;
}
@@ -1779,15 +1779,16 @@ void ShenandoahHeap::op_roots() {
workers()->run_task(&task);
_unloader.unload();
set_concurrent_weak_root_in_progress(false);
}
if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
ShenandoahConcurrentRootsEvacUpdateTask task(!ShenandoahConcurrentRoots::should_do_concurrent_class_unloading());
workers()->run_task(&task);
}
set_concurrent_weak_root_in_progress(false);
set_concurrent_root_in_progress(false);
}
set_concurrent_root_in_progress(false);
}
class ShenandoahResetUpdateRegionStateClosure : public ShenandoahHeapRegionClosure {
@@ -2046,6 +2047,15 @@ void ShenandoahHeap::set_concurrent_root_in_progress(bool in_progress) {
}
}
// Raise or clear the "concurrent weak root processing in progress" flag.
// The flag is only meaningful when concurrent root processing is possible
// at all, which the assert below checks.
void ShenandoahHeap::set_concurrent_weak_root_in_progress(bool in_progress) {
  assert(ShenandoahConcurrentRoots::can_do_concurrent_roots(), "Why set the flag?");
  if (!in_progress) {
    _concurrent_weak_root_in_progress.unset();
  } else {
    _concurrent_weak_root_in_progress.set();
  }
}
void ShenandoahHeap::ref_processing_init() {
assert(_max_workers > 0, "Sanity");
@@ -2324,6 +2334,7 @@ void ShenandoahHeap::prepare_concurrent_roots() {
assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
set_concurrent_root_in_progress(true);
set_concurrent_weak_root_in_progress(true);
}
}

View File

@@ -277,6 +277,7 @@ private:
ShenandoahSharedFlag _full_gc_move_in_progress;
ShenandoahSharedFlag _progress_last_gc;
ShenandoahSharedFlag _concurrent_root_in_progress;
ShenandoahSharedFlag _concurrent_weak_root_in_progress;
void set_gc_state_all_threads(char state);
void set_gc_state_mask(uint mask, bool value);
@@ -293,6 +294,7 @@ public:
void set_full_gc_move_in_progress(bool in_progress);
void set_has_forwarded_objects(bool cond);
void set_concurrent_root_in_progress(bool cond);
void set_concurrent_weak_root_in_progress(bool cond);
inline bool is_stable() const;
inline bool is_idle() const;
@@ -306,6 +308,7 @@ public:
inline bool is_gc_in_progress_mask(uint mask) const;
inline bool is_stw_gc_in_progress() const;
inline bool is_concurrent_root_in_progress() const;
inline bool is_concurrent_weak_root_in_progress() const;
// ---------- GC cancellation and degeneration machinery
//

View File

@@ -382,6 +382,10 @@ inline bool ShenandoahHeap::is_concurrent_root_in_progress() const {
return _concurrent_root_in_progress.is_set();
}
// Query whether concurrent weak root processing is currently underway.
inline bool ShenandoahHeap::is_concurrent_weak_root_in_progress() const {
  const bool weak_roots_active = _concurrent_weak_root_in_progress.is_set();
  return weak_roots_active;
}
template<class T>
inline void ShenandoahHeap::marked_object_iterate(ShenandoahHeapRegion* region, T* cl) {
marked_object_iterate(region, cl, region->top());

View File

@@ -213,7 +213,7 @@ void ShenandoahNMethod::heal_nmethod(nmethod* nm) {
ShenandoahKeepNMethodMetadataAliveClosure<false> cl;
data->oops_do(&cl);
}
} else if (heap->is_concurrent_root_in_progress()) {
} else if (heap->is_concurrent_weak_root_in_progress()) {
ShenandoahEvacOOMScope evac_scope;
ShenandoahEvacuateUpdateRootsClosure<> cl;
data->oops_do(&cl, true /*fix relocation*/);

View File

@@ -166,7 +166,7 @@ public:
void ShenandoahUnload::unload() {
assert(ShenandoahConcurrentRoots::can_do_concurrent_class_unloading(), "Why we here?");
if (!ShenandoahHeap::heap()->is_concurrent_root_in_progress()) {
if (!ShenandoahHeap::heap()->is_concurrent_weak_root_in_progress()) {
return;
}