diff --git a/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp b/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp index 5d00688f671..4f114690f53 100644 --- a/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp +++ b/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp @@ -121,10 +121,9 @@ inline oop ShenandoahBarrierSet::load_reference_barrier(DecoratorSet decorators, return nullptr; } - // Prevent resurrection of unreachable objects that are visited during - // concurrent class-unloading. + // Allow runtime to see unreachable objects that are visited during concurrent class-unloading. if ((decorators & AS_NO_KEEPALIVE) != 0 && - _heap->is_evacuation_in_progress() && + _heap->is_concurrent_weak_root_in_progress() && !_heap->marking_context()->is_marked(obj)) { return obj; } diff --git a/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp b/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp index 62150a20db8..29fd4002578 100644 --- a/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp +++ b/src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp @@ -150,15 +150,22 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) { return false; } + assert(heap->is_concurrent_weak_root_in_progress(), "Must be doing weak roots now"); + // Concurrent stack processing if (heap->is_evacuation_in_progress()) { entry_thread_roots(); } - // Process weak roots that might still point to regions that would be broken by cleanup - if (heap->is_concurrent_weak_root_in_progress()) { - entry_weak_refs(); - entry_weak_roots(); + // Process weak roots that might still point to regions that would be broken by cleanup. + // We cannot recycle regions because weak roots need to know what is marked in trashed regions. + entry_weak_refs(); + entry_weak_roots(); + + // Perform concurrent class unloading before any regions get recycled. Class unloading may + // need to inspect unmarked objects in trashed regions. 
+ if (heap->unload_classes()) { + entry_class_unloading(); } // Final mark might have reclaimed some immediate garbage, kick cleanup to reclaim @@ -168,12 +175,6 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) { heap->free_set()->log_status_under_lock(); - // Perform concurrent class unloading - if (heap->unload_classes() && - heap->is_concurrent_weak_root_in_progress()) { - entry_class_unloading(); - } - // Processing strong roots // This may be skipped if there is nothing to update/evacuate. // If so, strong_root_in_progress would be unset. diff --git a/src/hotspot/share/gc/shenandoah/shenandoahFreeSet.cpp b/src/hotspot/share/gc/shenandoah/shenandoahFreeSet.cpp index 138e0320748..8a3eeb6a74d 100644 --- a/src/hotspot/share/gc/shenandoah/shenandoahFreeSet.cpp +++ b/src/hotspot/share/gc/shenandoah/shenandoahFreeSet.cpp @@ -999,7 +999,10 @@ HeapWord* ShenandoahFreeSet::try_allocate_in(ShenandoahHeapRegion* r, Shenandoah assert (has_alloc_capacity(r), "Performance: should avoid full regions on this path: %zu", r->index()); if (_heap->is_concurrent_weak_root_in_progress() && r->is_trash()) { // We cannot use this region for allocation when weak roots are in progress because the collector may need - // to reference unmarked oops during concurrent classunloading. + // to reference unmarked oops during concurrent class unloading. The collector also needs accurate marking + // information to determine which weak handles need to be null'd out. If the region is recycled before weak + // roots processing has finished, weak root processing may fail to null out a handle into a trashed region. + // This turns the handle into a dangling pointer and will crash or corrupt the heap. 
return nullptr; } HeapWord* result = nullptr; diff --git a/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp b/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp index fc1ff230688..ad09aecbe68 100644 --- a/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp +++ b/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp @@ -818,18 +818,31 @@ size_t ShenandoahHeap::initial_capacity() const { } bool ShenandoahHeap::is_in(const void* p) const { - if (is_in_reserved(p)) { - if (is_full_gc_move_in_progress()) { - // Full GC move is running, we do not have a consistent region - // information yet. But we know the pointer is in heap. - return true; - } - // Now check if we point to a live section in active region. - ShenandoahHeapRegion* r = heap_region_containing(p); - return (r->is_active() && p < r->top()); - } else { + if (!is_in_reserved(p)) { return false; } + + if (is_full_gc_move_in_progress()) { + // Full GC move is running, we do not have a consistent region + // information yet. But we know the pointer is in heap. + return true; + } + + // Now check if we point to a live section in active region. + const ShenandoahHeapRegion* r = heap_region_containing(p); + if (p >= r->top()) { + return false; + } + + if (r->is_active()) { + return true; + } + + // The region is trash, but won't be recycled until after concurrent weak + // roots. We also don't allow mutators to allocate from trash regions + // during weak roots. Concurrent class unloading may access unmarked oops + // in trash regions. + return r->is_trash() && is_concurrent_weak_root_in_progress(); } void ShenandoahHeap::notify_soft_max_changed() {