8369068: GenShen: Generations still aren't reconciled assertion failure

Reviewed-by: ysr, kdnilsen
This commit is contained in:
William Kemper 2025-10-23 19:06:47 +00:00
parent b0721e2859
commit b2e431a1cb
32 changed files with 361 additions and 401 deletions

View File

@ -127,7 +127,7 @@ void ShenandoahGenerationalHeuristics::choose_collection_set(ShenandoahCollectio
// Reclaim humongous regions here, and count them as the immediate garbage
#ifdef ASSERT
bool reg_live = region->has_live();
-bool bm_live = heap->active_generation()->complete_marking_context()->is_marked(cast_to_oop(region->bottom()));
+bool bm_live = _generation->complete_marking_context()->is_marked(cast_to_oop(region->bottom()));
assert(reg_live == bm_live,
"Humongous liveness and marks should agree. Region live: %s; Bitmap live: %s; Region Live Words: %zu",
BOOL_TO_STR(reg_live), BOOL_TO_STR(bm_live), region->get_live_data_words());

View File

@ -73,10 +73,11 @@ ShenandoahHeuristics::~ShenandoahHeuristics() {
}
void ShenandoahHeuristics::choose_collection_set(ShenandoahCollectionSet* collection_set) {
assert(collection_set->is_empty(), "Must be empty");
ShenandoahHeap* heap = ShenandoahHeap::heap();
assert(collection_set->is_empty(), "Must be empty");
assert(!heap->mode()->is_generational(), "Wrong heuristic for heap mode");
// Check all pinned regions have updated status before choosing the collection set.
heap->assert_pinned_region_status();
@ -120,7 +121,7 @@ void ShenandoahHeuristics::choose_collection_set(ShenandoahCollectionSet* collec
// Reclaim humongous regions here, and count them as the immediate garbage
#ifdef ASSERT
bool reg_live = region->has_live();
-bool bm_live = heap->gc_generation()->complete_marking_context()->is_marked(cast_to_oop(region->bottom()));
+bool bm_live = heap->global_generation()->complete_marking_context()->is_marked(cast_to_oop(region->bottom()));
assert(reg_live == bm_live,
"Humongous liveness and marks should agree. Region live: %s; Bitmap live: %s; Region Live Words: %zu",
BOOL_TO_STR(reg_live), BOOL_TO_STR(bm_live), region->get_live_data_words());

View File

@ -425,6 +425,16 @@ void ShenandoahAsserts::assert_marked_strong(void *interior_loc, oop obj, const
}
}
void ShenandoahAsserts::assert_mark_complete(HeapWord* obj, const char* file, int line) {
const ShenandoahHeap* heap = ShenandoahHeap::heap();
const ShenandoahHeapRegion* region = heap->heap_region_containing(obj);
const ShenandoahGeneration* generation = heap->generation_for(region->affiliation());
if (!generation->is_mark_complete()) {
ShenandoahMessageBuffer msg("Marking should be complete for object " PTR_FORMAT " in the %s generation", p2i(obj), generation->name());
report_vm_error(file, line, msg.buffer());
}
}
void ShenandoahAsserts::assert_in_cset(void* interior_loc, oop obj, const char* file, int line) {
assert_correct(interior_loc, obj, file, line);
@ -542,23 +552,6 @@ void ShenandoahAsserts::assert_control_or_vm_thread_at_safepoint(bool at_safepoi
report_vm_error(file, line, msg.buffer());
}
void ShenandoahAsserts::assert_generations_reconciled(const char* file, int line) {
if (!ShenandoahSafepoint::is_at_shenandoah_safepoint()) {
// Only shenandoah safepoint operations participate in the active/gc generation scheme
return;
}
ShenandoahHeap* heap = ShenandoahHeap::heap();
ShenandoahGeneration* ggen = heap->gc_generation();
ShenandoahGeneration* agen = heap->active_generation();
if (agen == ggen) {
return;
}
ShenandoahMessageBuffer msg("Active(%s) & GC(%s) Generations aren't reconciled", agen->name(), ggen->name());
report_vm_error(file, line, msg.buffer());
}
bool ShenandoahAsserts::extract_klass_safely(oop obj, narrowKlass& nk, const Klass*& k) {
nk = 0;
k = nullptr;

View File

@ -65,6 +65,9 @@ public:
static void assert_marked(void* interior_loc, oop obj, const char* file, int line);
static void assert_marked_weak(void* interior_loc, oop obj, const char* file, int line);
static void assert_marked_strong(void* interior_loc, oop obj, const char* file, int line);
// Assert that marking is complete for the generation where this obj resides
static void assert_mark_complete(HeapWord* obj, const char* file, int line);
static void assert_in_cset(void* interior_loc, oop obj, const char* file, int line);
static void assert_not_in_cset(void* interior_loc, oop obj, const char* file, int line);
static void assert_not_in_cset_loc(void* interior_loc, const char* file, int line);
@ -76,7 +79,6 @@ public:
static void assert_heaplocked_or_safepoint(const char* file, int line);
static void assert_control_or_vm_thread_at_safepoint(bool at_safepoint, const char* file, int line);
static void assert_generational(const char* file, int line);
static void assert_generations_reconciled(const char* file, int line);
// Given a possibly invalid oop, extract narrowKlass (if UCCP) and Klass*
// from it safely.
@ -133,6 +135,9 @@ public:
#define shenandoah_assert_marked_strong(interior_loc, obj) \
ShenandoahAsserts::assert_marked_strong(interior_loc, obj, __FILE__, __LINE__)
#define shenandoah_assert_mark_complete(obj) \
ShenandoahAsserts::assert_mark_complete(obj, __FILE__, __LINE__)
#define shenandoah_assert_in_cset_if(interior_loc, obj, condition) \
if (condition) ShenandoahAsserts::assert_in_cset(interior_loc, obj, __FILE__, __LINE__)
#define shenandoah_assert_in_cset_except(interior_loc, obj, exception) \
@ -184,10 +189,6 @@ public:
#define shenandoah_assert_generational() \
ShenandoahAsserts::assert_generational(__FILE__, __LINE__)
// Some limited sanity checking of the _gc_generation and _active_generation fields of ShenandoahHeap
#define shenandoah_assert_generations_reconciled() \
ShenandoahAsserts::assert_generations_reconciled(__FILE__, __LINE__)
#else
#define shenandoah_assert_in_heap_bounds(interior_loc, obj)
#define shenandoah_assert_in_heap_bounds_or_null(interior_loc, obj)
@ -217,6 +218,8 @@ public:
#define shenandoah_assert_marked_strong_except(interior_loc, obj, exception)
#define shenandoah_assert_marked_strong(interior_loc, obj)
#define shenandoah_assert_mark_complete(obj)
#define shenandoah_assert_in_cset_if(interior_loc, obj, condition)
#define shenandoah_assert_in_cset_except(interior_loc, obj, exception)
#define shenandoah_assert_in_cset(interior_loc, obj)
@ -241,7 +244,6 @@ public:
#define shenandoah_assert_control_or_vm_thread()
#define shenandoah_assert_control_or_vm_thread_at_safepoint()
#define shenandoah_assert_generational()
#define shenandoah_assert_generations_reconciled()
#endif

View File

@ -91,8 +91,8 @@ public:
};
ShenandoahConcurrentGC::ShenandoahConcurrentGC(ShenandoahGeneration* generation, bool do_old_gc_bootstrap) :
+ShenandoahGC(generation),
_mark(generation),
-_generation(generation),
_degen_point(ShenandoahDegenPoint::_degenerated_unset),
_abbreviated(false),
_do_old_gc_bootstrap(do_old_gc_bootstrap) {
@ -576,7 +576,7 @@ void ShenandoahConcurrentGC::entry_promote_in_place() const {
ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::promote_in_place);
EventMark em("%s", "Promote in place");
-ShenandoahGenerationalHeap::heap()->promote_regions_in_place(true);
+ShenandoahGenerationalHeap::heap()->promote_regions_in_place(_generation, true);
}
void ShenandoahConcurrentGC::entry_update_thread_roots() {
@ -706,7 +706,7 @@ void ShenandoahConcurrentGC::op_init_mark() {
if (ShenandoahVerify) {
ShenandoahTimingsTracker v(ShenandoahPhaseTimings::init_mark_verify);
-heap->verifier()->verify_before_concmark();
+heap->verifier()->verify_before_concmark(_generation);
}
if (VerifyBeforeGC) {
@ -763,7 +763,7 @@ void ShenandoahConcurrentGC::op_final_mark() {
assert(!heap->has_forwarded_objects(), "No forwarded objects on this path");
if (ShenandoahVerify) {
-heap->verifier()->verify_roots_no_forwarded();
+heap->verifier()->verify_roots_no_forwarded(_generation);
}
if (!heap->cancelled_gc()) {
@ -791,7 +791,7 @@ void ShenandoahConcurrentGC::op_final_mark() {
if (ShenandoahVerify) {
ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
-heap->verifier()->verify_before_evacuation();
+heap->verifier()->verify_before_evacuation(_generation);
}
heap->set_evacuation_in_progress(true);
@ -806,9 +806,9 @@ void ShenandoahConcurrentGC::op_final_mark() {
if (ShenandoahVerify) {
ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
if (has_in_place_promotions(heap)) {
-heap->verifier()->verify_after_concmark_with_promotions();
+heap->verifier()->verify_after_concmark_with_promotions(_generation);
} else {
-heap->verifier()->verify_after_concmark();
+heap->verifier()->verify_after_concmark(_generation);
}
}
}
@ -877,18 +877,20 @@ void ShenandoahConcurrentGC::op_weak_refs() {
class ShenandoahEvacUpdateCleanupOopStorageRootsClosure : public BasicOopIterateClosure {
private:
ShenandoahHeap* const _heap;
ShenandoahGeneration* const _generation;
ShenandoahMarkingContext* const _mark_context;
bool _evac_in_progress;
Thread* const _thread;
public:
-ShenandoahEvacUpdateCleanupOopStorageRootsClosure();
+explicit ShenandoahEvacUpdateCleanupOopStorageRootsClosure(ShenandoahGeneration* generation);
void do_oop(oop* p);
void do_oop(narrowOop* p);
};
-ShenandoahEvacUpdateCleanupOopStorageRootsClosure::ShenandoahEvacUpdateCleanupOopStorageRootsClosure() :
+ShenandoahEvacUpdateCleanupOopStorageRootsClosure::ShenandoahEvacUpdateCleanupOopStorageRootsClosure(ShenandoahGeneration* generation) :
_heap(ShenandoahHeap::heap()),
+_generation(generation),
_mark_context(ShenandoahHeap::heap()->marking_context()),
_evac_in_progress(ShenandoahHeap::heap()->is_evacuation_in_progress()),
_thread(Thread::current()) {
@ -898,8 +900,7 @@ void ShenandoahEvacUpdateCleanupOopStorageRootsClosure::do_oop(oop* p) {
const oop obj = RawAccess<>::oop_load(p);
if (!CompressedOops::is_null(obj)) {
if (!_mark_context->is_marked(obj)) {
-shenandoah_assert_generations_reconciled();
-if (_heap->is_in_active_generation(obj)) {
+if (_generation->contains(obj)) {
// Note: The obj is dead here. Do not touch it, just clear.
ShenandoahHeap::atomic_clear_oop(p, obj);
}
@ -942,14 +943,16 @@ private:
ShenandoahClassLoaderDataRoots<true /* concurrent */>
_cld_roots;
ShenandoahConcurrentNMethodIterator _nmethod_itr;
ShenandoahGeneration* _generation;
ShenandoahPhaseTimings::Phase _phase;
public:
-ShenandoahConcurrentWeakRootsEvacUpdateTask(ShenandoahPhaseTimings::Phase phase) :
+ShenandoahConcurrentWeakRootsEvacUpdateTask(ShenandoahGeneration* generation, ShenandoahPhaseTimings::Phase phase) :
WorkerTask("Shenandoah Evacuate/Update Concurrent Weak Roots"),
_vm_roots(phase),
_cld_roots(phase, ShenandoahHeap::heap()->workers()->active_workers(), false /*heap iteration*/),
_nmethod_itr(ShenandoahCodeRoots::table()),
_generation(generation),
_phase(phase) {}
~ShenandoahConcurrentWeakRootsEvacUpdateTask() {
@ -957,14 +960,14 @@ public:
_vm_roots.report_num_dead();
}
-void work(uint worker_id) {
+void work(uint worker_id) override {
ShenandoahConcurrentWorkerSession worker_session(worker_id);
ShenandoahSuspendibleThreadSetJoiner sts_join;
{
ShenandoahEvacOOMScope oom;
// jni_roots and weak_roots are OopStorage backed roots, concurrent iteration
// may race against OopStorage::release() calls.
-ShenandoahEvacUpdateCleanupOopStorageRootsClosure cl;
+ShenandoahEvacUpdateCleanupOopStorageRootsClosure cl(_generation);
_vm_roots.oops_do(&cl, worker_id);
}
@ -999,7 +1002,7 @@ void ShenandoahConcurrentGC::op_weak_roots() {
// Concurrent weak root processing
ShenandoahTimingsTracker t(ShenandoahPhaseTimings::conc_weak_roots_work);
ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::conc_weak_roots_work);
-ShenandoahConcurrentWeakRootsEvacUpdateTask task(ShenandoahPhaseTimings::conc_weak_roots_work);
+ShenandoahConcurrentWeakRootsEvacUpdateTask task(_generation, ShenandoahPhaseTimings::conc_weak_roots_work);
heap->workers()->run_task(&task);
}
@ -1105,19 +1108,19 @@ void ShenandoahConcurrentGC::op_cleanup_early() {
}
void ShenandoahConcurrentGC::op_evacuate() {
-ShenandoahHeap::heap()->evacuate_collection_set(true /*concurrent*/);
+ShenandoahHeap::heap()->evacuate_collection_set(_generation, true /*concurrent*/);
}
void ShenandoahConcurrentGC::op_init_update_refs() {
+ShenandoahHeap* const heap = ShenandoahHeap::heap();
if (ShenandoahVerify) {
-ShenandoahHeap* const heap = ShenandoahHeap::heap();
ShenandoahTimingsTracker v(ShenandoahPhaseTimings::init_update_refs_verify);
-heap->verifier()->verify_before_update_refs();
+heap->verifier()->verify_before_update_refs(_generation);
}
}
void ShenandoahConcurrentGC::op_update_refs() {
-ShenandoahHeap::heap()->update_heap_references(true /*concurrent*/);
+ShenandoahHeap::heap()->update_heap_references(_generation, true /*concurrent*/);
}
class ShenandoahUpdateThreadHandshakeClosure : public HandshakeClosure {
@ -1163,7 +1166,7 @@ void ShenandoahConcurrentGC::op_final_update_refs() {
// Has to be done before cset is clear
if (ShenandoahVerify) {
-heap->verifier()->verify_roots_in_to_space();
+heap->verifier()->verify_roots_in_to_space(_generation);
}
// If we are running in generational mode and this is an aging cycle, this will also age active
@ -1198,7 +1201,7 @@ void ShenandoahConcurrentGC::op_final_update_refs() {
if (ShenandoahVerify) {
ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_update_refs_verify);
-heap->verifier()->verify_after_update_refs();
+heap->verifier()->verify_after_update_refs(_generation);
}
if (VerifyAfterGC) {

View File

@ -47,7 +47,6 @@ class ShenandoahConcurrentGC : public ShenandoahGC {
protected:
ShenandoahConcurrentMark _mark;
ShenandoahGeneration* const _generation;
private:
ShenandoahDegenPoint _degen_point;

View File

@ -56,18 +56,11 @@ public:
}
void work(uint worker_id) {
ShenandoahHeap* heap = ShenandoahHeap::heap();
ShenandoahConcurrentWorkerSession worker_session(worker_id);
ShenandoahWorkerTimingsTracker timer(ShenandoahPhaseTimings::conc_mark, ShenandoahPhaseTimings::ParallelMark, worker_id, true);
ShenandoahSuspendibleThreadSetJoiner stsj;
// Do not use active_generation() : we must use the gc_generation() set by
// ShenandoahGCScope on the ControllerThread's stack; no safepoint may
// intervene to update active_generation, so we can't
// shenandoah_assert_generations_reconciled() here.
ShenandoahReferenceProcessor* rp = heap->gc_generation()->ref_processor();
assert(rp != nullptr, "need reference processor");
StringDedup::Requests requests;
-_cm->mark_loop(worker_id, _terminator, rp, GENERATION, true /*cancellable*/,
+_cm->mark_loop(worker_id, _terminator, GENERATION, true /*cancellable*/,
ShenandoahStringDedup::is_enabled() ? ENQUEUE_DEDUP : NO_DEDUP,
&requests);
}
@ -106,9 +99,6 @@ public:
ShenandoahParallelWorkerSession worker_session(worker_id);
StringDedup::Requests requests;
ShenandoahReferenceProcessor* rp = heap->gc_generation()->ref_processor();
shenandoah_assert_generations_reconciled();
// First drain remaining SATB buffers.
{
ShenandoahObjToScanQueue* q = _cm->get_queue(worker_id);
@ -122,7 +112,7 @@ public:
ShenandoahSATBAndRemarkThreadsClosure tc(satb_mq_set);
Threads::possibly_parallel_threads_do(true /* is_par */, &tc);
}
-_cm->mark_loop(worker_id, _terminator, rp, GENERATION, false /*not cancellable*/,
+_cm->mark_loop(worker_id, _terminator, GENERATION, false /*not cancellable*/,
_dedup_string ? ENQUEUE_DEDUP : NO_DEDUP,
&requests);
assert(_cm->task_queues()->is_empty(), "Should be empty");

View File

@ -46,9 +46,8 @@
#include "utilities/events.hpp"
ShenandoahDegenGC::ShenandoahDegenGC(ShenandoahDegenPoint degen_point, ShenandoahGeneration* generation) :
-ShenandoahGC(),
+ShenandoahGC(generation),
_degen_point(degen_point),
-_generation(generation),
_abbreviated(false) {
}
@ -260,7 +259,7 @@ void ShenandoahDegenGC::op_degenerated() {
} else if (has_in_place_promotions(heap)) {
// We have nothing to evacuate, but there are still regions to promote in place.
ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_promote_regions);
-ShenandoahGenerationalHeap::heap()->promote_regions_in_place(false /* concurrent*/);
+ShenandoahGenerationalHeap::heap()->promote_regions_in_place(_generation, false /* concurrent*/);
}
// Update collector state regardless of whether there are forwarded objects
@ -300,7 +299,7 @@ void ShenandoahDegenGC::op_degenerated() {
}
if (ShenandoahVerify) {
-heap->verifier()->verify_after_degenerated();
+heap->verifier()->verify_after_degenerated(_generation);
}
if (VerifyAfterGC) {
@ -337,11 +336,11 @@ void ShenandoahDegenGC::op_finish_mark() {
void ShenandoahDegenGC::op_prepare_evacuation() {
ShenandoahHeap* const heap = ShenandoahHeap::heap();
if (ShenandoahVerify) {
-heap->verifier()->verify_roots_no_forwarded();
+heap->verifier()->verify_roots_no_forwarded(_generation);
}
// STW cleanup weak roots and unload classes
-heap->parallel_cleaning(false /*full gc*/);
+heap->parallel_cleaning(_generation, false /*full gc*/);
// Prepare regions and collection set
_generation->prepare_regions_and_collection_set(false /*concurrent*/);
@ -358,7 +357,7 @@ void ShenandoahDegenGC::op_prepare_evacuation() {
if (!heap->collection_set()->is_empty()) {
if (ShenandoahVerify) {
-heap->verifier()->verify_before_evacuation();
+heap->verifier()->verify_before_evacuation(_generation);
}
heap->set_evacuation_in_progress(true);
@ -366,9 +365,9 @@ void ShenandoahDegenGC::op_prepare_evacuation() {
} else {
if (ShenandoahVerify) {
if (has_in_place_promotions(heap)) {
-heap->verifier()->verify_after_concmark_with_promotions();
+heap->verifier()->verify_after_concmark_with_promotions(_generation);
} else {
-heap->verifier()->verify_after_concmark();
+heap->verifier()->verify_after_concmark(_generation);
}
}
@ -388,7 +387,7 @@ void ShenandoahDegenGC::op_cleanup_early() {
void ShenandoahDegenGC::op_evacuate() {
ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_stw_evac);
-ShenandoahHeap::heap()->evacuate_collection_set(false /* concurrent*/);
+ShenandoahHeap::heap()->evacuate_collection_set(_generation, false /* concurrent*/);
}
void ShenandoahDegenGC::op_init_update_refs() {
@ -402,7 +401,7 @@ void ShenandoahDegenGC::op_update_refs() {
ShenandoahHeap* const heap = ShenandoahHeap::heap();
ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_update_refs);
// Handed over from concurrent update references phase
-heap->update_heap_references(false /*concurrent*/);
+heap->update_heap_references(_generation, false /*concurrent*/);
heap->set_update_refs_in_progress(false);
heap->set_has_forwarded_objects(false);
@ -416,7 +415,7 @@ void ShenandoahDegenGC::op_update_roots() {
heap->update_heap_region_states(false /*concurrent*/);
if (ShenandoahVerify) {
-heap->verifier()->verify_after_update_refs();
+heap->verifier()->verify_after_update_refs(_generation);
}
if (VerifyAfterGC) {

View File

@ -34,12 +34,11 @@ class ShenandoahDegenGC : public ShenandoahGC {
friend class VM_ShenandoahDegeneratedGC;
private:
const ShenandoahDegenPoint _degen_point;
ShenandoahGeneration* _generation;
bool _abbreviated;
public:
ShenandoahDegenGC(ShenandoahDegenPoint degen_point, ShenandoahGeneration* generation);
-bool collect(GCCause::Cause cause);
+bool collect(GCCause::Cause cause) override;
private:
void vmop_degenerated();

View File

@ -68,6 +68,7 @@
#include "utilities/growableArray.hpp"
ShenandoahFullGC::ShenandoahFullGC() :
ShenandoahGC(ShenandoahHeap::heap()->global_generation()),
_gc_timer(ShenandoahHeap::heap()->gc_timer()),
_preserved_marks(new PreservedMarksSet(true)) {}
@ -124,7 +125,7 @@ void ShenandoahFullGC::op_full(GCCause::Cause cause) {
}
// Regardless if progress was made, we record that we completed a "successful" full GC.
-heap->global_generation()->heuristics()->record_success_full();
+_generation->heuristics()->record_success_full();
heap->shenandoah_policy()->record_success_full();
{
@ -141,7 +142,7 @@ void ShenandoahFullGC::do_it(GCCause::Cause gc_cause) {
}
if (ShenandoahVerify) {
-heap->verifier()->verify_before_fullgc();
+heap->verifier()->verify_before_fullgc(_generation);
}
if (VerifyBeforeGC) {
@ -194,7 +195,7 @@ void ShenandoahFullGC::do_it(GCCause::Cause gc_cause) {
}
// d. Abandon reference discovery and clear all discovered references.
-ShenandoahReferenceProcessor* rp = heap->global_generation()->ref_processor();
+ShenandoahReferenceProcessor* rp = _generation->ref_processor();
rp->abandon_partial_discovery();
// e. Sync pinned region status from the CP marks
@ -273,7 +274,7 @@ void ShenandoahFullGC::do_it(GCCause::Cause gc_cause) {
heap->set_full_gc_in_progress(false);
if (ShenandoahVerify) {
-heap->verifier()->verify_after_fullgc();
+heap->verifier()->verify_after_fullgc(_generation);
}
if (VerifyAfterGC) {
@ -292,19 +293,19 @@ void ShenandoahFullGC::phase1_mark_heap() {
ShenandoahHeap* heap = ShenandoahHeap::heap();
-heap->global_generation()->reset_mark_bitmap<true, true>();
+_generation->reset_mark_bitmap<true, true>();
assert(heap->marking_context()->is_bitmap_clear(), "sanity");
-assert(!heap->global_generation()->is_mark_complete(), "sanity");
+assert(!_generation->is_mark_complete(), "sanity");
-heap->set_unload_classes(heap->global_generation()->heuristics()->can_unload_classes());
+heap->set_unload_classes(_generation->heuristics()->can_unload_classes());
-ShenandoahReferenceProcessor* rp = heap->global_generation()->ref_processor();
+ShenandoahReferenceProcessor* rp = _generation->ref_processor();
// enable ("weak") refs discovery
rp->set_soft_reference_policy(true); // forcefully purge all soft references
-ShenandoahSTWMark mark(heap->global_generation(), true /*full_gc*/);
+ShenandoahSTWMark mark(_generation, true /*full_gc*/);
mark.mark();
-heap->parallel_cleaning(true /* full_gc */);
+heap->parallel_cleaning(_generation, true /* full_gc */);
if (ShenandoahHeap::heap()->mode()->is_generational()) {
ShenandoahGenerationalFullGC::log_live_in_old(heap);
@ -350,10 +351,12 @@ public:
return _empty_regions_pos;
}
-void do_object(oop p) {
+void do_object(oop p) override {
+shenandoah_assert_mark_complete(cast_from_oop<HeapWord*>(p));
assert(_from_region != nullptr, "must set before work");
-assert(_heap->gc_generation()->complete_marking_context()->is_marked(p), "must be marked");
-assert(!_heap->gc_generation()->complete_marking_context()->allocated_after_mark_start(p), "must be truly marked");
+assert(_heap->global_generation()->is_mark_complete(), "marking must be finished");
+assert(_heap->marking_context()->is_marked(p), "must be marked");
+assert(!_heap->marking_context()->allocated_after_mark_start(p), "must be truly marked");
size_t obj_size = p->size();
if (_compact_point + obj_size > _to_region->end()) {
@ -523,12 +526,8 @@ void ShenandoahFullGC::calculate_target_humongous_objects() {
}
class ShenandoahEnsureHeapActiveClosure: public ShenandoahHeapRegionClosure {
-private:
-ShenandoahHeap* const _heap;
public:
-ShenandoahEnsureHeapActiveClosure() : _heap(ShenandoahHeap::heap()) {}
-void heap_region_do(ShenandoahHeapRegion* r) {
+void heap_region_do(ShenandoahHeapRegion* r) override {
if (r->is_trash()) {
r->try_recycle_under_lock();
}
@ -760,7 +759,6 @@ void ShenandoahFullGC::phase2_calculate_target_addresses(ShenandoahHeapRegionSet
class ShenandoahAdjustPointersClosure : public MetadataVisitingOopIterateClosure {
private:
ShenandoahHeap* const _heap;
ShenandoahMarkingContext* const _ctx;
template <class T>
@ -778,8 +776,7 @@ private:
public:
ShenandoahAdjustPointersClosure() :
-_heap(ShenandoahHeap::heap()),
-_ctx(ShenandoahHeap::heap()->gc_generation()->complete_marking_context()) {}
+_ctx(ShenandoahHeap::heap()->global_generation()->complete_marking_context()) {}
void do_oop(oop* p) { do_oop_work(p); }
void do_oop(narrowOop* p) { do_oop_work(p); }
@ -789,15 +786,12 @@ public:
class ShenandoahAdjustPointersObjectClosure : public ObjectClosure {
private:
-ShenandoahHeap* const _heap;
ShenandoahAdjustPointersClosure _cl;
public:
-ShenandoahAdjustPointersObjectClosure() :
-_heap(ShenandoahHeap::heap()) {
-}
-void do_object(oop p) {
-assert(_heap->gc_generation()->complete_marking_context()->is_marked(p), "must be marked");
+void do_object(oop p) override {
+assert(ShenandoahHeap::heap()->global_generation()->is_mark_complete(), "marking must be complete");
+assert(ShenandoahHeap::heap()->marking_context()->is_marked(p), "must be marked");
p->oop_iterate(&_cl);
}
};
@ -813,7 +807,7 @@ public:
_heap(ShenandoahHeap::heap()) {
}
-void work(uint worker_id) {
+void work(uint worker_id) override {
ShenandoahParallelWorkerSession worker_session(worker_id);
ShenandoahAdjustPointersObjectClosure obj_cl;
ShenandoahHeapRegion* r = _regions.next();
@ -839,7 +833,7 @@ public:
_rp(rp),
_preserved_marks(preserved_marks) {}
-void work(uint worker_id) {
+void work(uint worker_id) override {
ShenandoahParallelWorkerSession worker_session(worker_id);
ShenandoahAdjustPointersClosure cl;
_rp->roots_do(worker_id, &cl);
@ -873,15 +867,15 @@ void ShenandoahFullGC::phase3_update_references() {
class ShenandoahCompactObjectsClosure : public ObjectClosure {
private:
-ShenandoahHeap* const _heap;
-uint const _worker_id;
+uint const _worker_id;
public:
-ShenandoahCompactObjectsClosure(uint worker_id) :
-_heap(ShenandoahHeap::heap()), _worker_id(worker_id) {}
+explicit ShenandoahCompactObjectsClosure(uint worker_id) :
+_worker_id(worker_id) {}
-void do_object(oop p) {
-assert(_heap->gc_generation()->complete_marking_context()->is_marked(p), "must be marked");
+void do_object(oop p) override {
+assert(ShenandoahHeap::heap()->global_generation()->is_mark_complete(), "marking must be finished");
+assert(ShenandoahHeap::heap()->marking_context()->is_marked(p), "must be marked");
size_t size = p->size();
if (FullGCForwarding::is_forwarded(p)) {
HeapWord* compact_from = cast_from_oop<HeapWord*>(p);
@ -908,7 +902,7 @@ public:
_worker_slices(worker_slices) {
}
-void work(uint worker_id) {
+void work(uint worker_id) override {
ShenandoahParallelWorkerSession worker_session(worker_id);
ShenandoahHeapRegionSetIterator slice(_worker_slices[worker_id]);
@ -945,7 +939,7 @@ public:
_heap->free_set()->clear();
}
-void heap_region_do(ShenandoahHeapRegion* r) {
+void heap_region_do(ShenandoahHeapRegion* r) override {
assert (!r->is_cset(), "cset regions should have been demoted already");
// Need to reset the complete-top-at-mark-start pointer here because
@ -954,7 +948,7 @@ public:
// NOTE: See blurb at ShenandoahMCResetCompleteBitmapTask on why we need to skip
// pinned regions.
if (!r->is_pinned()) {
-_heap->gc_generation()->complete_marking_context()->reset_top_at_mark_start(r);
+_heap->marking_context()->reset_top_at_mark_start(r);
}
size_t live = r->used();
@ -1079,7 +1073,7 @@ void ShenandoahFullGC::compact_humongous_objects() {
// we need to remain able to walk pinned regions.
// Since pinned region do not move and don't get compacted, we will get holes with
// unreachable objects in them (which may have pointers to unloaded Klasses and thus
-// cannot be iterated over using oop->size(). The only way to safely iterate over those is using
+// cannot be iterated over using oop->size()). The only way to safely iterate over those is using
// a valid marking bitmap and valid TAMS pointer. This class only resets marking
// bitmaps for un-pinned regions, and later we only reset TAMS for unpinned regions.
class ShenandoahMCResetCompleteBitmapTask : public WorkerTask {
@ -1091,11 +1085,12 @@ public:
WorkerTask("Shenandoah Reset Bitmap") {
}
-void work(uint worker_id) {
+void work(uint worker_id) override {
ShenandoahParallelWorkerSession worker_session(worker_id);
ShenandoahHeapRegion* region = _regions.next();
ShenandoahHeap* heap = ShenandoahHeap::heap();
-ShenandoahMarkingContext* const ctx = heap->gc_generation()->complete_marking_context();
+ShenandoahMarkingContext* const ctx = heap->marking_context();
+assert(heap->global_generation()->is_mark_complete(), "Marking must be complete");
while (region != nullptr) {
if (heap->is_bitmap_slice_committed(region) && !region->is_pinned() && region->has_live()) {
ctx->clear_bitmap(region);
@ -1163,7 +1158,7 @@ ShenandoahGenerationalHeap::TransferResult ShenandoahFullGC::phase5_epilog() {
heap->free_set()->finish_rebuild(young_cset_regions, old_cset_regions, num_old);
// Set mark incomplete because the marking bitmaps have been reset except pinned regions.
-heap->global_generation()->set_mark_incomplete();
+_generation->set_mark_incomplete();
heap->clear_cancelled_gc();
}

View File

@ -68,7 +68,7 @@ private:
public:
ShenandoahFullGC();
~ShenandoahFullGC();
-bool collect(GCCause::Cause cause);
+bool collect(GCCause::Cause cause) override;
private:
// GC entries

View File

@ -44,6 +44,8 @@
* Full GC --------> (finish)
*/
class ShenandoahGeneration;
class ShenandoahGC : public StackObj {
public:
// Fail point from concurrent GC
@ -57,12 +59,17 @@ public:
_DEGENERATED_LIMIT
};
explicit ShenandoahGC(ShenandoahGeneration* generation) : _generation(generation) {}
// Returns false if the collection was cancelled, true otherwise.
virtual bool collect(GCCause::Cause cause) = 0;
static const char* degen_point_to_string(ShenandoahDegenPoint point);
ShenandoahGeneration* generation() const { return _generation; }
protected:
static void update_roots(bool full_gc);
ShenandoahGeneration* _generation;
};
#endif // SHARE_GC_SHENANDOAH_SHENANDOAHGC_HPP

View File

@ -203,7 +203,7 @@ private:
bool is_bitmap_clear();
// We need to track the status of marking for different generations.
-bool is_mark_complete() { return _is_marking_complete.is_set(); }
+bool is_mark_complete() const { return _is_marking_complete.is_set(); }
virtual void set_mark_complete();
virtual void set_mark_incomplete();

View File

@ -50,10 +50,12 @@ public:
};
ShenandoahGenerationalEvacuationTask::ShenandoahGenerationalEvacuationTask(ShenandoahGenerationalHeap* heap,
ShenandoahGeneration* generation,
ShenandoahRegionIterator* iterator,
bool concurrent, bool only_promote_regions) :
WorkerTask("Shenandoah Evacuation"),
_heap(heap),
_generation(generation),
_regions(iterator),
_concurrent(concurrent),
_only_promote_regions(only_promote_regions)
@ -169,13 +171,12 @@ void ShenandoahGenerationalEvacuationTask::maybe_promote_region(ShenandoahHeapRe
// We identify the entirety of the region as DIRTY to force the next remembered set scan to identify the "interesting pointers"
// contained herein.
void ShenandoahGenerationalEvacuationTask::promote_in_place(ShenandoahHeapRegion* region) {
-assert(!_heap->gc_generation()->is_old(), "Sanity check");
+assert(!_generation->is_old(), "Sanity check");
ShenandoahMarkingContext* const marking_context = _heap->young_generation()->complete_marking_context();
HeapWord* const tams = marking_context->top_at_mark_start(region);
{
const size_t old_garbage_threshold = (ShenandoahHeapRegion::region_size_bytes() * ShenandoahOldGarbageThreshold) / 100;
shenandoah_assert_generations_reconciled();
assert(!_heap->is_concurrent_old_mark_in_progress(), "Cannot promote in place during old marking");
assert(region->garbage_before_padded_for_promote() < old_garbage_threshold, "Region %zu has too much garbage for promotion", region->index());
assert(region->is_young(), "Only young regions can be promoted");
@ -259,8 +260,7 @@ void ShenandoahGenerationalEvacuationTask::promote_in_place(ShenandoahHeapRegion
void ShenandoahGenerationalEvacuationTask::promote_humongous(ShenandoahHeapRegion* region) {
ShenandoahMarkingContext* marking_context = _heap->marking_context();
oop obj = cast_to_oop(region->bottom());
-assert(_heap->gc_generation()->is_mark_complete(), "sanity");
-shenandoah_assert_generations_reconciled();
+assert(_generation->is_mark_complete(), "sanity");
assert(region->is_young(), "Only young regions can be promoted");
assert(region->is_humongous_start(), "Should not promote humongous continuation in isolation");
assert(_heap->is_tenurable(region), "Only promote regions that are sufficiently aged");

View File

@ -36,12 +36,14 @@ class ShenandoahRegionIterator;
class ShenandoahGenerationalEvacuationTask : public WorkerTask {
private:
ShenandoahGenerationalHeap* const _heap;
ShenandoahGeneration* const _generation;
ShenandoahRegionIterator* _regions;
bool _concurrent;
bool _only_promote_regions;
public:
ShenandoahGenerationalEvacuationTask(ShenandoahGenerationalHeap* sh,
ShenandoahGeneration* generation,
ShenandoahRegionIterator* iterator,
bool concurrent, bool only_promote_regions);
void work(uint worker_id) override;

View File

@ -53,8 +53,7 @@ void assert_usage_not_more_than_regions_used(ShenandoahGeneration* generation) {
void ShenandoahGenerationalFullGC::prepare() {
auto heap = ShenandoahGenerationalHeap::heap();
// Since we may arrive here from degenerated GC failure of either young or old, establish generation as GLOBAL.
heap->set_gc_generation(heap->global_generation());
heap->set_active_generation();
heap->set_active_generation(heap->global_generation());
// No need for old_gen->increase_used() as this was done when plabs were allocated.
heap->reset_generation_reserves();

View File

@ -178,15 +178,15 @@ bool ShenandoahGenerationalHeap::requires_barriers(stackChunkOop obj) const {
return false;
}
void ShenandoahGenerationalHeap::evacuate_collection_set(bool concurrent) {
void ShenandoahGenerationalHeap::evacuate_collection_set(ShenandoahGeneration* generation, bool concurrent) {
ShenandoahRegionIterator regions;
ShenandoahGenerationalEvacuationTask task(this, &regions, concurrent, false /* only promote regions */);
ShenandoahGenerationalEvacuationTask task(this, generation, &regions, concurrent, false /* only promote regions */);
workers()->run_task(&task);
}
void ShenandoahGenerationalHeap::promote_regions_in_place(bool concurrent) {
void ShenandoahGenerationalHeap::promote_regions_in_place(ShenandoahGeneration* generation, bool concurrent) {
ShenandoahRegionIterator regions;
ShenandoahGenerationalEvacuationTask task(this, &regions, concurrent, true /* only promote regions */);
ShenandoahGenerationalEvacuationTask task(this, generation, &regions, concurrent, true /* only promote regions */);
workers()->run_task(&task);
}
@ -757,23 +757,27 @@ void ShenandoahGenerationalHeap::coalesce_and_fill_old_regions(bool concurrent)
template<bool CONCURRENT>
class ShenandoahGenerationalUpdateHeapRefsTask : public WorkerTask {
private:
// For update refs, _generation will be young or global. Mixed collections use the young generation.
ShenandoahGeneration* _generation;
ShenandoahGenerationalHeap* _heap;
ShenandoahRegionIterator* _regions;
ShenandoahRegionChunkIterator* _work_chunks;
public:
explicit ShenandoahGenerationalUpdateHeapRefsTask(ShenandoahRegionIterator* regions,
ShenandoahRegionChunkIterator* work_chunks) :
ShenandoahGenerationalUpdateHeapRefsTask(ShenandoahGeneration* generation,
ShenandoahRegionIterator* regions,
ShenandoahRegionChunkIterator* work_chunks) :
WorkerTask("Shenandoah Update References"),
_generation(generation),
_heap(ShenandoahGenerationalHeap::heap()),
_regions(regions),
_work_chunks(work_chunks)
{
bool old_bitmap_stable = _heap->old_generation()->is_mark_complete();
const bool old_bitmap_stable = _heap->old_generation()->is_mark_complete();
log_debug(gc, remset)("Update refs, scan remembered set using bitmap: %s", BOOL_TO_STR(old_bitmap_stable));
}
void work(uint worker_id) {
void work(uint worker_id) override {
if (CONCURRENT) {
ShenandoahConcurrentWorkerSession worker_session(worker_id);
ShenandoahSuspendibleThreadSetJoiner stsj;
@ -803,10 +807,8 @@ private:
// If !CONCURRENT, there's no value in expanding Mutator free set
ShenandoahHeapRegion* r = _regions->next();
// We update references for global, old, and young collections.
ShenandoahGeneration* const gc_generation = _heap->gc_generation();
shenandoah_assert_generations_reconciled();
assert(gc_generation->is_mark_complete(), "Expected complete marking");
// We update references for global, mixed, and young collections.
assert(_generation->is_mark_complete(), "Expected complete marking");
ShenandoahMarkingContext* const ctx = _heap->marking_context();
bool is_mixed = _heap->collection_set()->has_old_regions();
while (r != nullptr) {
@ -818,7 +820,7 @@ private:
if (r->is_young()) {
_heap->marked_object_oop_iterate(r, &cl, update_watermark);
} else if (r->is_old()) {
if (gc_generation->is_global()) {
if (_generation->is_global()) {
_heap->marked_object_oop_iterate(r, &cl, update_watermark);
}
@ -847,7 +849,7 @@ private:
r = _regions->next();
}
if (!gc_generation->is_global()) {
if (_generation->is_young()) {
// Since this is generational and not GLOBAL, we have to process the remembered set. There's no remembered
// set processing if not in generational mode or if GLOBAL mode.
@ -961,15 +963,15 @@ private:
}
};
void ShenandoahGenerationalHeap::update_heap_references(bool concurrent) {
void ShenandoahGenerationalHeap::update_heap_references(ShenandoahGeneration* generation, bool concurrent) {
assert(!is_full_gc_in_progress(), "Only for concurrent and degenerated GC");
const uint nworkers = workers()->active_workers();
ShenandoahRegionChunkIterator work_list(nworkers);
if (concurrent) {
ShenandoahGenerationalUpdateHeapRefsTask<true> task(&_update_refs_iterator, &work_list);
ShenandoahGenerationalUpdateHeapRefsTask<true> task(generation, &_update_refs_iterator, &work_list);
workers()->run_task(&task);
} else {
ShenandoahGenerationalUpdateHeapRefsTask<false> task(&_update_refs_iterator, &work_list);
ShenandoahGenerationalUpdateHeapRefsTask<false> task(generation, &_update_refs_iterator, &work_list);
workers()->run_task(&task);
}
@ -1044,7 +1046,7 @@ public:
void ShenandoahGenerationalHeap::final_update_refs_update_region_states() {
ShenandoahSynchronizePinnedRegionStates pins;
ShenandoahUpdateRegionAges ages(active_generation()->complete_marking_context());
ShenandoahUpdateRegionAges ages(marking_context());
auto cl = ShenandoahCompositeRegionClosure::of(pins, ages);
parallel_heap_region_iterate(&cl);
}

View File

@ -88,8 +88,11 @@ public:
oop evacuate_object(oop p, Thread* thread) override;
oop try_evacuate_object(oop p, Thread* thread, ShenandoahHeapRegion* from_region, ShenandoahAffiliation target_gen);
void evacuate_collection_set(bool concurrent) override;
void promote_regions_in_place(bool concurrent);
// In the generational mode, we will use these two functions for young, mixed, and global collections.
// For young and mixed, the generation argument will be the young generation, otherwise it will be the global generation.
void evacuate_collection_set(ShenandoahGeneration* generation, bool concurrent) override;
void promote_regions_in_place(ShenandoahGeneration* generation, bool concurrent);
size_t plab_min_size() const { return _min_plab_size; }
size_t plab_max_size() const { return _max_plab_size; }
@ -99,7 +102,9 @@ public:
// ---------- Update References
//
void update_heap_references(bool concurrent) override;
// In the generational mode, we will use this function for young, mixed, and global collections.
// For young and mixed, the generation argument will be the young generation, otherwise it will be the global generation.
void update_heap_references(ShenandoahGeneration* generation, bool concurrent) override;
void final_update_refs_update_region_states() override;
private:

View File

@ -529,7 +529,6 @@ void ShenandoahHeap::initialize_heuristics() {
ShenandoahHeap::ShenandoahHeap(ShenandoahCollectorPolicy* policy) :
CollectedHeap(),
_gc_generation(nullptr),
_active_generation(nullptr),
_initial_size(0),
_committed(0),
@ -1257,7 +1256,8 @@ private:
ShenandoahGCStatePropagatorHandshakeClosure _propagator;
};
void ShenandoahHeap::evacuate_collection_set(bool concurrent) {
void ShenandoahHeap::evacuate_collection_set(ShenandoahGeneration* generation, bool concurrent) {
assert(generation->is_global(), "Only global generation expected here");
ShenandoahEvacuationTask task(this, _collection_set, concurrent);
workers()->run_task(&task);
}
@ -1659,17 +1659,11 @@ void ShenandoahHeap::print_tracing_info() const {
}
}
void ShenandoahHeap::set_gc_generation(ShenandoahGeneration* generation) {
shenandoah_assert_control_or_vm_thread_at_safepoint();
_gc_generation = generation;
}
// Active generation may only be set by the VM thread at a safepoint.
void ShenandoahHeap::set_active_generation() {
void ShenandoahHeap::set_active_generation(ShenandoahGeneration* generation) {
assert(Thread::current()->is_VM_thread(), "Only the VM Thread");
assert(SafepointSynchronize::is_at_safepoint(), "Only at a safepoint!");
assert(_gc_generation != nullptr, "Will set _active_generation to nullptr");
_active_generation = _gc_generation;
_active_generation = generation;
}
void ShenandoahHeap::on_cycle_start(GCCause::Cause cause, ShenandoahGeneration* generation) {
@ -1678,17 +1672,14 @@ void ShenandoahHeap::on_cycle_start(GCCause::Cause cause, ShenandoahGeneration*
const GCCause::Cause current = gc_cause();
assert(current == GCCause::_no_gc, "Over-writing cause: %s, with: %s",
GCCause::to_string(current), GCCause::to_string(cause));
assert(_gc_generation == nullptr, "Over-writing _gc_generation");
set_gc_cause(cause);
set_gc_generation(generation);
generation->heuristics()->record_cycle_start();
}
void ShenandoahHeap::on_cycle_end(ShenandoahGeneration* generation) {
assert(gc_cause() != GCCause::_no_gc, "cause wasn't set");
assert(_gc_generation != nullptr, "_gc_generation wasn't set");
generation->heuristics()->record_cycle_end();
if (mode()->is_generational() && generation->is_global()) {
@ -1697,14 +1688,13 @@ void ShenandoahHeap::on_cycle_end(ShenandoahGeneration* generation) {
old_generation()->heuristics()->record_cycle_end();
}
set_gc_generation(nullptr);
set_gc_cause(GCCause::_no_gc);
}
void ShenandoahHeap::verify(VerifyOption vo) {
if (ShenandoahSafepoint::is_at_shenandoah_safepoint()) {
if (ShenandoahVerify) {
verifier()->verify_generic(vo);
verifier()->verify_generic(active_generation(), vo);
} else {
// TODO: Consider allocating verification bitmaps on demand,
// and turn this on unconditionally.
@ -2064,14 +2054,13 @@ void ShenandoahHeap::do_class_unloading() {
}
}
void ShenandoahHeap::stw_weak_refs(bool full_gc) {
void ShenandoahHeap::stw_weak_refs(ShenandoahGeneration* generation, bool full_gc) {
// Weak refs processing
ShenandoahPhaseTimings::Phase phase = full_gc ? ShenandoahPhaseTimings::full_gc_weakrefs
: ShenandoahPhaseTimings::degen_gc_weakrefs;
ShenandoahTimingsTracker t(phase);
ShenandoahGCWorkerPhase worker_phase(phase);
shenandoah_assert_generations_reconciled();
gc_generation()->ref_processor()->process_references(phase, workers(), false /* concurrent */);
generation->ref_processor()->process_references(phase, workers(), false /* concurrent */);
}
void ShenandoahHeap::prepare_update_heap_references() {
@ -2312,13 +2301,13 @@ void ShenandoahHeap::stw_process_weak_roots(bool full_gc) {
}
}
void ShenandoahHeap::parallel_cleaning(bool full_gc) {
void ShenandoahHeap::parallel_cleaning(ShenandoahGeneration* generation, bool full_gc) {
assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
assert(is_stw_gc_in_progress(), "Only for Degenerated and Full GC");
ShenandoahGCPhase phase(full_gc ?
ShenandoahPhaseTimings::full_gc_purge :
ShenandoahPhaseTimings::degen_gc_purge);
stw_weak_refs(full_gc);
stw_weak_refs(generation, full_gc);
stw_process_weak_roots(full_gc);
stw_unload_classes(full_gc);
}
@ -2426,11 +2415,8 @@ void ShenandoahHeap::sync_pinned_region_status() {
void ShenandoahHeap::assert_pinned_region_status() {
for (size_t i = 0; i < num_regions(); i++) {
ShenandoahHeapRegion* r = get_region(i);
shenandoah_assert_generations_reconciled();
if (gc_generation()->contains(r)) {
assert((r->is_pinned() && r->pin_count() > 0) || (!r->is_pinned() && r->pin_count() == 0),
"Region %zu pinning status is inconsistent", i);
}
assert((r->is_pinned() && r->pin_count() > 0) || (!r->is_pinned() && r->pin_count() == 0),
"Region %zu pinning status is inconsistent", i);
}
}
#endif
@ -2533,7 +2519,8 @@ private:
}
};
void ShenandoahHeap::update_heap_references(bool concurrent) {
void ShenandoahHeap::update_heap_references(ShenandoahGeneration* generation, bool concurrent) {
assert(generation->is_global(), "Should only get global generation here");
assert(!is_full_gc_in_progress(), "Only for concurrent and degenerated GC");
if (concurrent) {

View File

@ -145,17 +145,10 @@ class ShenandoahHeap : public CollectedHeap {
private:
ShenandoahHeapLock _lock;
// Indicates the generation whose collection is in
// progress. Mutator threads aren't allowed to read
// this field.
ShenandoahGeneration* _gc_generation;
// This is set and cleared by only the VMThread
// at each STW pause (safepoint) to the value seen in
// _gc_generation. This allows the value to be always consistently
// at each STW pause (safepoint) to the value given to the VM operation.
// This allows the value to be always consistently
// seen by all mutators as well as all GC worker threads.
// In that sense, it's a stable snapshot of _gc_generation that is
// updated at each STW pause associated with a ShenandoahVMOp.
ShenandoahGeneration* _active_generation;
protected:
@ -167,25 +160,13 @@ public:
return &_lock;
}
ShenandoahGeneration* gc_generation() const {
// We don't want this field read by a mutator thread
assert(!Thread::current()->is_Java_thread(), "Not allowed");
// value of _gc_generation field, see above
return _gc_generation;
}
ShenandoahGeneration* active_generation() const {
// value of _active_generation field, see above
return _active_generation;
}
// Set the _gc_generation field
void set_gc_generation(ShenandoahGeneration* generation);
// Copy the value in the _gc_generation field into
// the _active_generation field: can only be called at
// a safepoint by the VMThread.
void set_active_generation();
// Update the _active_generation field: can only be called at a safepoint by the VMThread.
void set_active_generation(ShenandoahGeneration* generation);
ShenandoahHeuristics* heuristics();
@ -482,7 +463,7 @@ private:
// GC support
// Evacuation
virtual void evacuate_collection_set(bool concurrent);
virtual void evacuate_collection_set(ShenandoahGeneration* generation, bool concurrent);
// Concurrent root processing
void prepare_concurrent_roots();
void finish_concurrent_roots();
@ -497,7 +478,7 @@ private:
// Turn off weak roots flag, purge old satb buffers in generational mode
void concurrent_final_roots(HandshakeClosure* handshake_closure = nullptr);
virtual void update_heap_references(bool concurrent);
virtual void update_heap_references(ShenandoahGeneration* generation, bool concurrent);
// Final update region states
void update_heap_region_states(bool concurrent);
virtual void final_update_refs_update_region_states();
@ -605,12 +586,12 @@ public:
bool unload_classes() const;
// Perform STW class unloading and weak root cleaning
void parallel_cleaning(bool full_gc);
void parallel_cleaning(ShenandoahGeneration* generation, bool full_gc);
private:
void stw_unload_classes(bool full_gc);
void stw_process_weak_roots(bool full_gc);
void stw_weak_refs(bool full_gc);
void stw_weak_refs(ShenandoahGeneration* generation, bool full_gc);
inline void assert_lock_for_affiliation(ShenandoahAffiliation orig_affiliation,
ShenandoahAffiliation new_affiliation);

View File

@ -315,9 +315,9 @@ void ShenandoahHeapRegion::make_trash_immediate() {
// On this path, we know there are no marked objects in the region,
// tell marking context about it to bypass bitmap resets.
assert(ShenandoahHeap::heap()->gc_generation()->is_mark_complete(), "Marking should be complete here.");
shenandoah_assert_generations_reconciled();
ShenandoahHeap::heap()->marking_context()->reset_top_bitmap(this);
const ShenandoahHeap* heap = ShenandoahHeap::heap();
assert(heap->generation_for(affiliation())->is_mark_complete(), "Marking should be complete here.");
heap->marking_context()->reset_top_bitmap(this);
}
void ShenandoahHeapRegion::make_empty() {
@ -461,9 +461,9 @@ bool ShenandoahHeapRegion::oop_coalesce_and_fill(bool cancellable) {
ShenandoahGenerationalHeap* heap = ShenandoahGenerationalHeap::heap();
ShenandoahMarkingContext* marking_context = heap->marking_context();
// Expect marking to be completed before these threads invoke this service.
assert(heap->gc_generation()->is_mark_complete(), "sanity");
shenandoah_assert_generations_reconciled();
// Expect marking to be completed for the old generation before we fill in unmarked objects
assert(heap->old_generation()->is_mark_complete(), "sanity");
assert(is_old(), "Only need to coalesce and fill old regions");
// All objects above TAMS are considered live even though their mark bits will not be set. Note that young-
// gen evacuations that interrupt a long-running old-gen concurrent mark may promote objects into old-gen

View File

@ -55,10 +55,10 @@ ShenandoahMark::ShenandoahMark(ShenandoahGeneration* generation) :
}
template <ShenandoahGenerationType GENERATION, bool CANCELLABLE, StringDedupMode STRING_DEDUP>
void ShenandoahMark::mark_loop_prework(uint w, TaskTerminator *t, ShenandoahReferenceProcessor *rp, StringDedup::Requests* const req, bool update_refs) {
void ShenandoahMark::mark_loop_prework(uint w, TaskTerminator *t, StringDedup::Requests* const req, bool update_refs) {
ShenandoahObjToScanQueue* q = get_queue(w);
ShenandoahObjToScanQueue* old_q = get_old_queue(w);
ShenandoahReferenceProcessor *rp = _generation->ref_processor();
ShenandoahHeap* const heap = ShenandoahHeap::heap();
ShenandoahLiveData* ld = heap->get_liveness_cache(w);
@ -78,22 +78,22 @@ void ShenandoahMark::mark_loop_prework(uint w, TaskTerminator *t, ShenandoahRefe
}
template<bool CANCELLABLE, StringDedupMode STRING_DEDUP>
void ShenandoahMark::mark_loop(uint worker_id, TaskTerminator* terminator, ShenandoahReferenceProcessor *rp,
ShenandoahGenerationType generation, StringDedup::Requests* const req) {
void ShenandoahMark::mark_loop(uint worker_id, TaskTerminator* terminator,
ShenandoahGenerationType generation_type, StringDedup::Requests* const req) {
bool update_refs = ShenandoahHeap::heap()->has_forwarded_objects();
switch (generation) {
switch (generation_type) {
case YOUNG:
mark_loop_prework<YOUNG, CANCELLABLE, STRING_DEDUP>(worker_id, terminator, rp, req, update_refs);
mark_loop_prework<YOUNG, CANCELLABLE, STRING_DEDUP>(worker_id, terminator, req, update_refs);
break;
case OLD:
// Old generation collection only performs marking, it should not update references.
mark_loop_prework<OLD, CANCELLABLE, STRING_DEDUP>(worker_id, terminator, rp, req, false);
mark_loop_prework<OLD, CANCELLABLE, STRING_DEDUP>(worker_id, terminator, req, false);
break;
case GLOBAL:
mark_loop_prework<GLOBAL, CANCELLABLE, STRING_DEDUP>(worker_id, terminator, rp, req, update_refs);
mark_loop_prework<GLOBAL, CANCELLABLE, STRING_DEDUP>(worker_id, terminator, req, update_refs);
break;
case NON_GEN:
mark_loop_prework<NON_GEN, CANCELLABLE, STRING_DEDUP>(worker_id, terminator, rp, req, update_refs);
mark_loop_prework<NON_GEN, CANCELLABLE, STRING_DEDUP>(worker_id, terminator, req, update_refs);
break;
default:
ShouldNotReachHere();
@ -101,30 +101,30 @@ void ShenandoahMark::mark_loop(uint worker_id, TaskTerminator* terminator, Shena
}
}
void ShenandoahMark::mark_loop(uint worker_id, TaskTerminator* terminator, ShenandoahReferenceProcessor *rp,
ShenandoahGenerationType generation, bool cancellable, StringDedupMode dedup_mode, StringDedup::Requests* const req) {
void ShenandoahMark::mark_loop(uint worker_id, TaskTerminator* terminator, ShenandoahGenerationType generation_type,
bool cancellable, StringDedupMode dedup_mode, StringDedup::Requests* const req) {
if (cancellable) {
switch(dedup_mode) {
case NO_DEDUP:
mark_loop<true, NO_DEDUP>(worker_id, terminator, rp, generation, req);
mark_loop<true, NO_DEDUP>(worker_id, terminator, generation_type, req);
break;
case ENQUEUE_DEDUP:
mark_loop<true, ENQUEUE_DEDUP>(worker_id, terminator, rp, generation, req);
mark_loop<true, ENQUEUE_DEDUP>(worker_id, terminator, generation_type, req);
break;
case ALWAYS_DEDUP:
mark_loop<true, ALWAYS_DEDUP>(worker_id, terminator, rp, generation, req);
mark_loop<true, ALWAYS_DEDUP>(worker_id, terminator, generation_type, req);
break;
}
} else {
switch(dedup_mode) {
case NO_DEDUP:
mark_loop<false, NO_DEDUP>(worker_id, terminator, rp, generation, req);
mark_loop<false, NO_DEDUP>(worker_id, terminator, generation_type, req);
break;
case ENQUEUE_DEDUP:
mark_loop<false, ENQUEUE_DEDUP>(worker_id, terminator, rp, generation, req);
mark_loop<false, ENQUEUE_DEDUP>(worker_id, terminator, generation_type, req);
break;
case ALWAYS_DEDUP:
mark_loop<false, ALWAYS_DEDUP>(worker_id, terminator, rp, generation, req);
mark_loop<false, ALWAYS_DEDUP>(worker_id, terminator, generation_type, req);
break;
}
}
@ -139,12 +139,8 @@ void ShenandoahMark::mark_loop_work(T* cl, ShenandoahLiveData* live_data, uint w
ShenandoahObjToScanQueue* q;
ShenandoahMarkTask t;
// Do not use active_generation() : we must use the gc_generation() set by
// ShenandoahGCScope on the ControllerThread's stack; no safepoint may
// intervene to update active_generation, so we can't
// shenandoah_assert_generations_reconciled() here.
assert(heap->gc_generation()->type() == GENERATION, "Sanity: %d != %d", heap->gc_generation()->type(), GENERATION);
heap->gc_generation()->ref_processor()->set_mark_closure(worker_id, cl);
assert(_generation->type() == GENERATION, "Sanity: %d != %d", _generation->type(), GENERATION);
_generation->ref_processor()->set_mark_closure(worker_id, cl);
/*
* Process outstanding queues, if any.

View File

@ -41,7 +41,6 @@ enum StringDedupMode {
};
class ShenandoahMarkingContext;
class ShenandoahReferenceProcessor;
// Base class for mark
// Mark class does not maintain states. Instead, mark states are
@ -72,7 +71,7 @@ public:
inline ShenandoahObjToScanQueue* get_queue(uint index) const;
inline ShenandoahObjToScanQueue* get_old_queue(uint index) const;
inline ShenandoahGeneration* generation() { return _generation; };
ShenandoahGeneration* generation() const { return _generation; };
private:
// ---------- Marking loop and tasks
@ -93,7 +92,7 @@ private:
void mark_loop_work(T* cl, ShenandoahLiveData* live_data, uint worker_id, TaskTerminator *t, StringDedup::Requests* const req);
template <ShenandoahGenerationType GENERATION, bool CANCELLABLE, StringDedupMode STRING_DEDUP>
void mark_loop_prework(uint worker_id, TaskTerminator *terminator, ShenandoahReferenceProcessor *rp, StringDedup::Requests* const req, bool update_refs);
void mark_loop_prework(uint worker_id, TaskTerminator *terminator, StringDedup::Requests* const req, bool update_refs);
template <ShenandoahGenerationType GENERATION>
static bool in_generation(ShenandoahHeap* const heap, oop obj);
@ -109,11 +108,11 @@ private:
inline void dedup_string(oop obj, StringDedup::Requests* const req);
protected:
template<bool CANCELLABLE, StringDedupMode STRING_DEDUP>
void mark_loop(uint worker_id, TaskTerminator* terminator, ShenandoahReferenceProcessor *rp,
ShenandoahGenerationType generation, StringDedup::Requests* const req);
void mark_loop(uint worker_id, TaskTerminator* terminator, ShenandoahGenerationType generation_type,
StringDedup::Requests* const req);
void mark_loop(uint worker_id, TaskTerminator* terminator, ShenandoahReferenceProcessor *rp,
ShenandoahGenerationType generation, bool cancellable, StringDedupMode dedup_mode, StringDedup::Requests* const req);
void mark_loop(uint worker_id, TaskTerminator* terminator, ShenandoahGenerationType generation_type,
bool cancellable, StringDedupMode dedup_mode, StringDedup::Requests* const req);
};
#endif // SHARE_GC_SHENANDOAH_SHENANDOAHMARK_HPP

View File

@ -49,7 +49,7 @@ void ShenandoahOldGC::op_final_mark() {
assert(!heap->has_forwarded_objects(), "No forwarded objects on this path");
if (ShenandoahVerify) {
heap->verifier()->verify_roots_no_forwarded();
heap->verifier()->verify_roots_no_forwarded(_old_generation);
}
if (!heap->cancelled_gc()) {

View File

@ -329,25 +329,31 @@ bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type
return true;
}
shenandoah_assert_mark_complete(raw_referent);
ShenandoahHeap* heap = ShenandoahHeap::heap();
// Check if the referent is still alive, in which case we should
// drop the reference.
// Check if the referent is still alive, in which case we should drop the reference.
if (type == REF_PHANTOM) {
return heap->active_generation()->complete_marking_context()->is_marked(raw_referent);
return heap->marking_context()->is_marked(raw_referent);
} else {
return heap->active_generation()->complete_marking_context()->is_marked_strong(raw_referent);
return heap->marking_context()->is_marked_strong(raw_referent);
}
}
template <typename T>
void ShenandoahReferenceProcessor::make_inactive(oop reference, ReferenceType type) const {
if (type == REF_FINAL) {
#ifdef ASSERT
auto referent = reference_referent_raw<T>(reference);
auto heap = ShenandoahHeap::heap();
shenandoah_assert_mark_complete(referent);
assert(reference_next<T>(reference) == nullptr, "Already inactive");
assert(heap->marking_context()->is_marked(referent), "only make inactive final refs with alive referents");
#endif
// Don't clear referent. It is needed by the Finalizer thread to make the call
// to finalize(). A FinalReference is instead made inactive by self-looping the
// next field. An application can't call FinalReference.enqueue(), so there is
// no race to worry about when setting the next field.
assert(reference_next<T>(reference) == nullptr, "Already inactive");
assert(ShenandoahHeap::heap()->active_generation()->complete_marking_context()->is_marked(reference_referent_raw<T>(reference)), "only make inactive final refs with alive referents");
reference_set_next(reference, reference);
} else {
// Clear referent
@ -437,8 +443,12 @@ oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
HeapWord* raw_referent = reference_referent_raw<T>(reference);
#ifdef ASSERT
assert(raw_referent == nullptr || ShenandoahHeap::heap()->active_generation()->complete_marking_context()->is_marked(raw_referent),
"only drop references with alive referents");
if (raw_referent != nullptr) {
ShenandoahHeap* heap = ShenandoahHeap::heap();
ShenandoahHeapRegion* region = heap->heap_region_containing(raw_referent);
ShenandoahMarkingContext* ctx = heap->generation_for(region->affiliation())->complete_marking_context();
assert(ctx->is_marked(raw_referent), "only drop references with alive referents");
}
#endif
// Unlink and return next in list

View File

@ -61,7 +61,7 @@ ShenandoahGCStateResetter::~ShenandoahGCStateResetter() {
assert(_heap->gc_state() == _saved_gc_state, "Should be restored");
}
void ShenandoahRootVerifier::roots_do(OopIterateClosure* oops) {
void ShenandoahRootVerifier::roots_do(OopIterateClosure* oops, ShenandoahGeneration* generation) {
ShenandoahGCStateResetter resetter;
shenandoah_assert_safepoint();
@ -75,9 +75,9 @@ void ShenandoahRootVerifier::roots_do(OopIterateClosure* oops) {
OopStorageSet::storage(id)->oops_do(oops);
}
ShenandoahHeap* heap = ShenandoahHeap::heap();
if (heap->mode()->is_generational() && heap->active_generation()->is_young()) {
if (generation->is_young()) {
shenandoah_assert_safepoint();
shenandoah_assert_generational();
ShenandoahGenerationalHeap::heap()->old_generation()->card_scan()->roots_do(oops);
}
@ -87,7 +87,7 @@ void ShenandoahRootVerifier::roots_do(OopIterateClosure* oops) {
Threads::possibly_parallel_oops_do(true, oops, nullptr);
}
void ShenandoahRootVerifier::strong_roots_do(OopIterateClosure* oops) {
void ShenandoahRootVerifier::strong_roots_do(OopIterateClosure* oops, ShenandoahGeneration* generation) {
ShenandoahGCStateResetter resetter;
shenandoah_assert_safepoint();
@ -98,8 +98,8 @@ void ShenandoahRootVerifier::strong_roots_do(OopIterateClosure* oops) {
OopStorageSet::storage(id)->oops_do(oops);
}
ShenandoahHeap* heap = ShenandoahHeap::heap();
if (heap->mode()->is_generational() && heap->active_generation()->is_young()) {
if (generation->is_young()) {
shenandoah_assert_generational();
ShenandoahGenerationalHeap::heap()->old_generation()->card_scan()->roots_do(oops);
}

View File

@ -43,8 +43,10 @@ public:
class ShenandoahRootVerifier : public AllStatic {
public:
// Used to seed ShenandoahVerifier, do not honor root type filter
static void roots_do(OopIterateClosure* cl);
static void strong_roots_do(OopIterateClosure* cl);
// The generation parameter here may be young or global. If it is young,
// then the roots will include the remembered set.
static void roots_do(OopIterateClosure* cl, ShenandoahGeneration* generation);
static void strong_roots_do(OopIterateClosure* cl, ShenandoahGeneration* generation);
};
#endif // SHARE_GC_SHENANDOAH_SHENANDOAHROOTVERIFIER_HPP

View File

@ -77,15 +77,13 @@ void ShenandoahSTWMark::mark() {
ShenandoahCodeRoots::arm_nmethods_for_mark();
// Weak reference processing
assert(ShenandoahHeap::heap()->gc_generation() == _generation, "Marking unexpected generation");
ShenandoahReferenceProcessor* rp = _generation->ref_processor();
shenandoah_assert_generations_reconciled();
rp->reset_thread_locals();
// Init mark, do not expect forwarded pointers in roots
if (ShenandoahVerify) {
assert(Thread::current()->is_VM_thread(), "Must be");
heap->verifier()->verify_roots_no_forwarded();
heap->verifier()->verify_roots_no_forwarded(_generation);
}
start_mark();
@ -119,7 +117,6 @@ void ShenandoahSTWMark::mark() {
}
void ShenandoahSTWMark::mark_roots(uint worker_id) {
assert(ShenandoahHeap::heap()->gc_generation() == _generation, "Marking unexpected generation");
ShenandoahReferenceProcessor* rp = _generation->ref_processor();
auto queue = task_queues()->queue(worker_id);
switch (_generation->type()) {
@ -148,14 +145,10 @@ void ShenandoahSTWMark::mark_roots(uint worker_id) {
}
void ShenandoahSTWMark::finish_mark(uint worker_id) {
assert(ShenandoahHeap::heap()->gc_generation() == _generation, "Marking unexpected generation");
ShenandoahPhaseTimings::Phase phase = _full_gc ? ShenandoahPhaseTimings::full_gc_mark : ShenandoahPhaseTimings::degen_gc_stw_mark;
ShenandoahWorkerTimingsTracker timer(phase, ShenandoahPhaseTimings::ParallelMark, worker_id);
ShenandoahReferenceProcessor* rp = _generation->ref_processor();
shenandoah_assert_generations_reconciled();
StringDedup::Requests requests;
mark_loop(worker_id, &_terminator, rp,
_generation->type(), false /* not cancellable */,
mark_loop(worker_id, &_terminator, _generation->type(), false /* not cancellable */,
ShenandoahStringDedup::is_enabled() ? ALWAYS_DEDUP : NO_DEDUP, &requests);
}

View File

@ -50,16 +50,14 @@ void VM_ShenandoahOperation::doit_epilogue() {
void VM_ShenandoahOperation::log_active_generation(const char* prefix) {
ShenandoahGeneration* agen = ShenandoahHeap::heap()->active_generation();
ShenandoahGeneration* ggen = ShenandoahHeap::heap()->gc_generation();
log_debug(gc, heap)("%s: active_generation is %s, gc_generation is %s", prefix,
agen == nullptr ? "nullptr" : shenandoah_generation_name(agen->type()),
ggen == nullptr ? "nullptr" : shenandoah_generation_name(ggen->type()));
log_debug(gc, heap)("%s: active_generation is %s", prefix,
agen == nullptr ? "nullptr" : shenandoah_generation_name(agen->type()));
}
void VM_ShenandoahOperation::set_active_generation() {
if (evaluate_at_safepoint()) {
assert(SafepointSynchronize::is_at_safepoint(), "Error??");
ShenandoahHeap::heap()->set_active_generation();
ShenandoahHeap::heap()->set_active_generation(_generation);
}
}
@ -77,42 +75,70 @@ void VM_ShenandoahReferenceOperation::doit_epilogue() {
Heap_lock->unlock();
}
VM_ShenandoahInitMark::VM_ShenandoahInitMark(ShenandoahConcurrentGC* gc)
: VM_ShenandoahOperation(gc->generation()), _gc(gc) {
}
void VM_ShenandoahInitMark::doit() {
ShenandoahGCPauseMark mark(_gc_id, "Init Mark", SvcGCMarker::CONCURRENT);
set_active_generation();
_gc->entry_init_mark();
}
VM_ShenandoahFinalMarkStartEvac::VM_ShenandoahFinalMarkStartEvac(ShenandoahConcurrentGC* gc)
: VM_ShenandoahOperation(gc->generation()), _gc(gc) {
}
void VM_ShenandoahFinalMarkStartEvac::doit() {
ShenandoahGCPauseMark mark(_gc_id, "Final Mark", SvcGCMarker::CONCURRENT);
set_active_generation();
_gc->entry_final_mark();
}
VM_ShenandoahFullGC::VM_ShenandoahFullGC(GCCause::Cause gc_cause, ShenandoahFullGC* full_gc)
: VM_ShenandoahReferenceOperation(full_gc->generation()), _gc_cause(gc_cause), _full_gc(full_gc) {
}
void VM_ShenandoahFullGC::doit() {
ShenandoahGCPauseMark mark(_gc_id, "Full GC", SvcGCMarker::FULL);
set_active_generation();
_full_gc->entry_full(_gc_cause);
}
VM_ShenandoahDegeneratedGC::VM_ShenandoahDegeneratedGC(ShenandoahDegenGC* gc)
: VM_ShenandoahReferenceOperation(gc->generation()), _gc(gc) {
}
void VM_ShenandoahDegeneratedGC::doit() {
ShenandoahGCPauseMark mark(_gc_id, "Degenerated GC", SvcGCMarker::CONCURRENT);
set_active_generation();
_gc->entry_degenerated();
}
VM_ShenandoahInitUpdateRefs::VM_ShenandoahInitUpdateRefs(ShenandoahConcurrentGC* gc)
: VM_ShenandoahOperation(gc->generation()), _gc(gc) {
}
void VM_ShenandoahInitUpdateRefs::doit() {
ShenandoahGCPauseMark mark(_gc_id, "Init Update Refs", SvcGCMarker::CONCURRENT);
set_active_generation();
_gc->entry_init_update_refs();
}
VM_ShenandoahFinalUpdateRefs::VM_ShenandoahFinalUpdateRefs(ShenandoahConcurrentGC* gc)
: VM_ShenandoahOperation(gc->generation()), _gc(gc) {
}
void VM_ShenandoahFinalUpdateRefs::doit() {
ShenandoahGCPauseMark mark(_gc_id, "Final Update Refs", SvcGCMarker::CONCURRENT);
set_active_generation();
_gc->entry_final_update_refs();
}
VM_ShenandoahFinalRoots::VM_ShenandoahFinalRoots(ShenandoahConcurrentGC* gc)
: VM_ShenandoahOperation(gc->generation()), _gc(gc) {
}
void VM_ShenandoahFinalRoots::doit() {
ShenandoahGCPauseMark mark(_gc_id, "Final Roots", SvcGCMarker::CONCURRENT);
set_active_generation();

View File

@ -46,10 +46,15 @@ class ShenandoahFullGC;
class VM_ShenandoahOperation : public VM_Operation {
protected:
uint _gc_id;
ShenandoahGeneration* _generation;
void set_active_generation();
public:
VM_ShenandoahOperation() : _gc_id(GCId::current()) {};
explicit VM_ShenandoahOperation(ShenandoahGeneration* generation)
: _gc_id(GCId::current())
, _generation(generation) {
}
bool skip_thread_oop_barriers() const override { return true; }
void log_active_generation(const char* prefix);
@ -61,93 +66,74 @@ public:
class VM_ShenandoahReferenceOperation : public VM_ShenandoahOperation {
public:
VM_ShenandoahReferenceOperation() : VM_ShenandoahOperation() {};
explicit VM_ShenandoahReferenceOperation(ShenandoahGeneration* generation)
: VM_ShenandoahOperation(generation) {};
bool doit_prologue() override;
void doit_epilogue() override;
};
class VM_ShenandoahInitMark: public VM_ShenandoahOperation {
private:
ShenandoahConcurrentGC* const _gc;
public:
VM_ShenandoahInitMark(ShenandoahConcurrentGC* gc) :
VM_ShenandoahOperation(),
_gc(gc) {};
VM_Operation::VMOp_Type type() const { return VMOp_ShenandoahInitMark; }
const char* name() const { return "Shenandoah Init Marking"; }
virtual void doit();
explicit VM_ShenandoahInitMark(ShenandoahConcurrentGC* gc);
VM_Operation::VMOp_Type type() const override { return VMOp_ShenandoahInitMark; }
const char* name() const override { return "Shenandoah Init Marking"; }
void doit() override;
};
class VM_ShenandoahFinalMarkStartEvac: public VM_ShenandoahOperation {
private:
ShenandoahConcurrentGC* const _gc;
public:
VM_ShenandoahFinalMarkStartEvac(ShenandoahConcurrentGC* gc) :
VM_ShenandoahOperation(),
_gc(gc) {};
VM_Operation::VMOp_Type type() const { return VMOp_ShenandoahFinalMarkStartEvac; }
const char* name() const { return "Shenandoah Final Mark and Start Evacuation"; }
virtual void doit();
explicit VM_ShenandoahFinalMarkStartEvac(ShenandoahConcurrentGC* gc);
VM_Operation::VMOp_Type type() const override { return VMOp_ShenandoahFinalMarkStartEvac; }
const char* name() const override { return "Shenandoah Final Mark and Start Evacuation"; }
void doit() override;
};
class VM_ShenandoahDegeneratedGC: public VM_ShenandoahReferenceOperation {
private:
ShenandoahDegenGC* const _gc;
public:
VM_ShenandoahDegeneratedGC(ShenandoahDegenGC* gc) :
VM_ShenandoahReferenceOperation(),
_gc(gc) {};
VM_Operation::VMOp_Type type() const { return VMOp_ShenandoahDegeneratedGC; }
const char* name() const { return "Shenandoah Degenerated GC"; }
virtual void doit();
explicit VM_ShenandoahDegeneratedGC(ShenandoahDegenGC* gc);
VM_Operation::VMOp_Type type() const override { return VMOp_ShenandoahDegeneratedGC; }
const char* name() const override { return "Shenandoah Degenerated GC"; }
void doit() override;
};
class VM_ShenandoahFullGC : public VM_ShenandoahReferenceOperation {
private:
GCCause::Cause _gc_cause;
ShenandoahFullGC* const _full_gc;
public:
VM_ShenandoahFullGC(GCCause::Cause gc_cause, ShenandoahFullGC* full_gc) :
VM_ShenandoahReferenceOperation(),
_gc_cause(gc_cause),
_full_gc(full_gc) {};
VM_Operation::VMOp_Type type() const { return VMOp_ShenandoahFullGC; }
const char* name() const { return "Shenandoah Full GC"; }
virtual void doit();
explicit VM_ShenandoahFullGC(GCCause::Cause gc_cause, ShenandoahFullGC* full_gc);
VM_Operation::VMOp_Type type() const override { return VMOp_ShenandoahFullGC; }
const char* name() const override { return "Shenandoah Full GC"; }
void doit() override;
};
class VM_ShenandoahInitUpdateRefs: public VM_ShenandoahOperation {
ShenandoahConcurrentGC* const _gc;
public:
VM_ShenandoahInitUpdateRefs(ShenandoahConcurrentGC* gc) :
VM_ShenandoahOperation(),
_gc(gc) {};
VM_Operation::VMOp_Type type() const { return VMOp_ShenandoahInitUpdateRefs; }
const char* name() const { return "Shenandoah Init Update References"; }
virtual void doit();
explicit VM_ShenandoahInitUpdateRefs(ShenandoahConcurrentGC* gc);
VM_Operation::VMOp_Type type() const override { return VMOp_ShenandoahInitUpdateRefs; }
const char* name() const override { return "Shenandoah Init Update References"; }
void doit() override;
};
class VM_ShenandoahFinalUpdateRefs: public VM_ShenandoahOperation {
ShenandoahConcurrentGC* const _gc;
public:
VM_ShenandoahFinalUpdateRefs(ShenandoahConcurrentGC* gc) :
VM_ShenandoahOperation(),
_gc(gc) {};
VM_Operation::VMOp_Type type() const { return VMOp_ShenandoahFinalUpdateRefs; }
const char* name() const { return "Shenandoah Final Update References"; }
virtual void doit();
explicit VM_ShenandoahFinalUpdateRefs(ShenandoahConcurrentGC* gc);
VM_Operation::VMOp_Type type() const override { return VMOp_ShenandoahFinalUpdateRefs; }
const char* name() const override { return "Shenandoah Final Update References"; }
void doit() override;
};
class VM_ShenandoahFinalRoots: public VM_ShenandoahOperation {
ShenandoahConcurrentGC* const _gc;
public:
VM_ShenandoahFinalRoots(ShenandoahConcurrentGC* gc) :
VM_ShenandoahOperation(),
_gc(gc) {};
VM_Operation::VMOp_Type type() const { return VMOp_ShenandoahFinalRoots; }
const char* name() const { return "Shenandoah Final Roots"; }
virtual void doit();
explicit VM_ShenandoahFinalRoots(ShenandoahConcurrentGC* gc);
VM_Operation::VMOp_Type type() const override { return VMOp_ShenandoahFinalRoots; }
const char* name() const override { return "Shenandoah Final Roots"; }
void doit() override;
};
#endif // SHARE_GC_SHENANDOAH_SHENANDOAHVMOPERATIONS_HPP

View File

@ -70,7 +70,8 @@ private:
ShenandoahGeneration* _generation;
public:
ShenandoahVerifyOopClosure(ShenandoahVerifierStack* stack, MarkBitMap* map, ShenandoahLivenessData* ld,
ShenandoahVerifyOopClosure(ShenandoahGeneration* generation, ShenandoahVerifierStack* stack,
MarkBitMap* map, ShenandoahLivenessData* ld,
const char* phase, ShenandoahVerifier::VerifyOptions options) :
_phase(phase),
_options(options),
@ -80,7 +81,7 @@ public:
_ld(ld),
_interior_loc(nullptr),
_loc(nullptr),
_generation(nullptr) {
_generation(generation) {
if (options._verify_marked == ShenandoahVerifier::_verify_marked_complete_except_references ||
options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty ||
options._verify_marked == ShenandoahVerifier::_verify_marked_disable) {
@ -92,12 +93,6 @@ public:
// Otherwise do all fields.
_ref_mode = DO_FIELDS;
}
if (_heap->mode()->is_generational()) {
_generation = _heap->gc_generation();
assert(_generation != nullptr, "Expected active generation in this mode");
shenandoah_assert_generations_reconciled();
}
}
ReferenceIterationMode reference_iteration_mode() override {
@ -131,11 +126,7 @@ private:
}
}
bool in_generation(oop obj) {
if (_generation == nullptr) {
return true;
}
bool in_generation(oop obj) const {
ShenandoahHeapRegion* region = _heap->heap_region_containing(obj);
return _generation->contains(region);
}
@ -197,9 +188,8 @@ private:
// fallthrough for fast failure for un-live regions:
case ShenandoahVerifier::_verify_liveness_conservative:
check(ShenandoahAsserts::_safe_oop, obj, obj_reg->has_live() ||
(obj_reg->is_old() && _heap->gc_generation()->is_young()),
(obj_reg->is_old() && _generation->is_young()),
"Object must belong to region with live data");
shenandoah_assert_generations_reconciled();
break;
default:
assert(false, "Unhandled liveness verification");
@ -276,12 +266,12 @@ private:
"Must be marked in incomplete bitmap");
break;
case ShenandoahVerifier::_verify_marked_complete:
check(ShenandoahAsserts::_safe_all, obj, _heap->gc_generation()->complete_marking_context()->is_marked(obj),
check(ShenandoahAsserts::_safe_all, obj, _generation->complete_marking_context()->is_marked(obj),
"Must be marked in complete bitmap");
break;
case ShenandoahVerifier::_verify_marked_complete_except_references:
case ShenandoahVerifier::_verify_marked_complete_satb_empty:
check(ShenandoahAsserts::_safe_all, obj, _heap->gc_generation()->complete_marking_context()->is_marked(obj),
check(ShenandoahAsserts::_safe_all, obj, _generation->complete_marking_context()->is_marked(obj),
"Must be marked in complete bitmap, except j.l.r.Reference referents");
break;
default:
@ -571,9 +561,11 @@ private:
ShenandoahLivenessData* _ld;
MarkBitMap* _bitmap;
volatile size_t _processed;
ShenandoahGeneration* _generation;
public:
ShenandoahVerifierReachableTask(MarkBitMap* bitmap,
ShenandoahVerifierReachableTask(ShenandoahGeneration* generation,
MarkBitMap* bitmap,
ShenandoahLivenessData* ld,
const char* label,
ShenandoahVerifier::VerifyOptions options) :
@ -583,7 +575,8 @@ public:
_heap(ShenandoahHeap::heap()),
_ld(ld),
_bitmap(bitmap),
_processed(0) {};
_processed(0),
_generation(generation) {};
size_t processed() const {
return _processed;
@ -599,20 +592,20 @@ public:
// extended parallelism would buy us out.
if (((ShenandoahVerifyLevel == 2) && (worker_id == 0))
|| (ShenandoahVerifyLevel >= 3)) {
ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
ShenandoahVerifyOopClosure cl(_generation, &stack, _bitmap, _ld,
ShenandoahMessageBuffer("%s, Roots", _label),
_options);
if (_heap->unload_classes()) {
ShenandoahRootVerifier::strong_roots_do(&cl);
ShenandoahRootVerifier::strong_roots_do(&cl, _generation);
} else {
ShenandoahRootVerifier::roots_do(&cl);
ShenandoahRootVerifier::roots_do(&cl, _generation);
}
}
size_t processed = 0;
if (ShenandoahVerifyLevel >= 3) {
ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
ShenandoahVerifyOopClosure cl(_generation, &stack, _bitmap, _ld,
ShenandoahMessageBuffer("%s, Reachable", _label),
_options);
while (!stack.is_empty()) {
@ -648,7 +641,8 @@ private:
ShenandoahGeneration* _generation;
public:
ShenandoahVerifierMarkedRegionTask(MarkBitMap* bitmap,
ShenandoahVerifierMarkedRegionTask(ShenandoahGeneration* generation,
MarkBitMap* bitmap,
ShenandoahLivenessData* ld,
const char* label,
ShenandoahVerifier::VerifyOptions options) :
@ -660,13 +654,7 @@ public:
_ld(ld),
_claimed(0),
_processed(0),
_generation(nullptr) {
if (_heap->mode()->is_generational()) {
_generation = _heap->gc_generation();
assert(_generation != nullptr, "Expected active generation in this mode.");
shenandoah_assert_generations_reconciled();
}
};
_generation(generation) {}
size_t processed() {
return AtomicAccess::load(&_processed);
@ -679,7 +667,7 @@ public:
}
ShenandoahVerifierStack stack;
ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
ShenandoahVerifyOopClosure cl(_generation, &stack, _bitmap, _ld,
ShenandoahMessageBuffer("%s, Marked", _label),
_options);
@ -702,14 +690,14 @@ public:
}
}
bool in_generation(ShenandoahHeapRegion* r) {
return _generation == nullptr || _generation->contains(r);
bool in_generation(ShenandoahHeapRegion* r) const {
return _generation->contains(r);
}
virtual void work_humongous(ShenandoahHeapRegion *r, ShenandoahVerifierStack& stack, ShenandoahVerifyOopClosure& cl) {
size_t processed = 0;
HeapWord* obj = r->bottom();
if (_heap->gc_generation()->complete_marking_context()->is_marked(cast_to_oop(obj))) {
if (_generation->complete_marking_context()->is_marked(cast_to_oop(obj))) {
verify_and_follow(obj, stack, cl, &processed);
}
AtomicAccess::add(&_processed, processed, memory_order_relaxed);
@ -717,7 +705,7 @@ public:
virtual void work_regular(ShenandoahHeapRegion *r, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl) {
size_t processed = 0;
ShenandoahMarkingContext* ctx = _heap->gc_generation()->complete_marking_context();
ShenandoahMarkingContext* ctx = _generation->complete_marking_context();
HeapWord* tams = ctx->top_at_mark_start(r);
// Bitmaps, before TAMS
@ -794,7 +782,8 @@ public:
}
};
void ShenandoahVerifier::verify_at_safepoint(const char* label,
void ShenandoahVerifier::verify_at_safepoint(ShenandoahGeneration* generation,
const char* label,
VerifyRememberedSet remembered,
VerifyForwarded forwarded,
VerifyMarked marked,
@ -896,16 +885,7 @@ void ShenandoahVerifier::verify_at_safepoint(const char* label,
log_debug(gc)("Safepoint verification finished heap usage verification");
ShenandoahGeneration* generation;
if (_heap->mode()->is_generational()) {
generation = _heap->gc_generation();
guarantee(generation != nullptr, "Need to know which generation to verify.");
shenandoah_assert_generations_reconciled();
} else {
generation = nullptr;
}
if (generation != nullptr) {
ShenandoahHeapLocker lock(_heap->lock());
switch (remembered) {
@ -952,11 +932,7 @@ void ShenandoahVerifier::verify_at_safepoint(const char* label,
// Internal heap region checks
if (ShenandoahVerifyLevel >= 1) {
ShenandoahVerifyHeapRegionClosure cl(label, regions);
if (generation != nullptr) {
generation->heap_region_iterate(&cl);
} else {
_heap->heap_region_iterate(&cl);
}
generation->heap_region_iterate(&cl);
}
log_debug(gc)("Safepoint verification finished heap region closure verification");
@ -980,7 +956,7 @@ void ShenandoahVerifier::verify_at_safepoint(const char* label,
// This verifies what application can see, since it only cares about reachable objects.
size_t count_reachable = 0;
if (ShenandoahVerifyLevel >= 2) {
ShenandoahVerifierReachableTask task(_verification_bit_map, ld, label, options);
ShenandoahVerifierReachableTask task(generation, _verification_bit_map, ld, label, options);
_heap->workers()->run_task(&task);
count_reachable = task.processed();
}
@ -999,8 +975,8 @@ void ShenandoahVerifier::verify_at_safepoint(const char* label,
(marked == _verify_marked_complete ||
marked == _verify_marked_complete_except_references ||
marked == _verify_marked_complete_satb_empty)) {
guarantee(_heap->gc_generation()->is_mark_complete(), "Marking context should be complete");
ShenandoahVerifierMarkedRegionTask task(_verification_bit_map, ld, label, options);
guarantee(generation->is_mark_complete(), "Marking context should be complete");
ShenandoahVerifierMarkedRegionTask task(generation, _verification_bit_map, ld, label, options);
_heap->workers()->run_task(&task);
count_marked = task.processed();
} else {
@ -1015,7 +991,7 @@ void ShenandoahVerifier::verify_at_safepoint(const char* label,
if (ShenandoahVerifyLevel >= 4 && marked == _verify_marked_complete && liveness == _verify_liveness_complete) {
for (size_t i = 0; i < _heap->num_regions(); i++) {
ShenandoahHeapRegion* r = _heap->get_region(i);
if (generation != nullptr && !generation->contains(r)) {
if (!generation->contains(r)) {
continue;
}
@ -1042,16 +1018,15 @@ void ShenandoahVerifier::verify_at_safepoint(const char* label,
}
log_debug(gc)("Safepoint verification finished accumulation of liveness data");
log_info(gc)("Verify %s, Level %zd (%zu reachable, %zu marked)",
label, ShenandoahVerifyLevel, count_reachable, count_marked);
FREE_C_HEAP_ARRAY(ShenandoahLivenessData, ld);
}
void ShenandoahVerifier::verify_generic(VerifyOption vo) {
void ShenandoahVerifier::verify_generic(ShenandoahGeneration* generation, VerifyOption vo) {
verify_at_safepoint(
generation,
"Generic Verification",
_verify_remembered_disable, // do not verify remembered set
_verify_forwarded_allow, // conservatively allow forwarded
@ -1064,7 +1039,7 @@ void ShenandoahVerifier::verify_generic(VerifyOption vo) {
);
}
void ShenandoahVerifier::verify_before_concmark() {
void ShenandoahVerifier::verify_before_concmark(ShenandoahGeneration* generation) {
VerifyRememberedSet verify_remembered_set = _verify_remembered_before_marking;
if (_heap->mode()->is_generational() &&
!_heap->old_generation()->is_mark_complete()) {
@ -1072,6 +1047,7 @@ void ShenandoahVerifier::verify_before_concmark() {
verify_remembered_set = _verify_remembered_disable;
}
verify_at_safepoint(
generation,
"Before Mark",
verify_remembered_set,
// verify read-only remembered set from bottom() to top()
@ -1085,8 +1061,9 @@ void ShenandoahVerifier::verify_before_concmark() {
);
}
void ShenandoahVerifier::verify_after_concmark() {
void ShenandoahVerifier::verify_after_concmark(ShenandoahGeneration* generation) {
verify_at_safepoint(
generation,
"After Mark",
_verify_remembered_disable, // do not verify remembered set
_verify_forwarded_none, // no forwarded references
@ -1099,8 +1076,9 @@ void ShenandoahVerifier::verify_after_concmark() {
);
}
void ShenandoahVerifier::verify_after_concmark_with_promotions() {
void ShenandoahVerifier::verify_after_concmark_with_promotions(ShenandoahGeneration* generation) {
verify_at_safepoint(
generation,
"After Mark",
_verify_remembered_disable, // do not verify remembered set
_verify_forwarded_none, // no forwarded references
@ -1114,8 +1092,9 @@ void ShenandoahVerifier::verify_after_concmark_with_promotions() {
);
}
void ShenandoahVerifier::verify_before_evacuation() {
void ShenandoahVerifier::verify_before_evacuation(ShenandoahGeneration* generation) {
verify_at_safepoint(
generation,
"Before Evacuation",
_verify_remembered_disable, // do not verify remembered set
_verify_forwarded_none, // no forwarded references
@ -1129,13 +1108,14 @@ void ShenandoahVerifier::verify_before_evacuation() {
);
}
void ShenandoahVerifier::verify_before_update_refs() {
void ShenandoahVerifier::verify_before_update_refs(ShenandoahGeneration* generation) {
VerifyRememberedSet verify_remembered_set = _verify_remembered_before_updating_references;
if (_heap->mode()->is_generational() &&
!_heap->old_generation()->is_mark_complete()) {
verify_remembered_set = _verify_remembered_disable;
}
verify_at_safepoint(
generation,
"Before Updating References",
verify_remembered_set, // verify read-write remembered set
_verify_forwarded_allow, // forwarded references allowed
@ -1149,8 +1129,9 @@ void ShenandoahVerifier::verify_before_update_refs() {
}
// We have not yet cleanup (reclaimed) the collection set
void ShenandoahVerifier::verify_after_update_refs() {
void ShenandoahVerifier::verify_after_update_refs(ShenandoahGeneration* generation) {
verify_at_safepoint(
generation,
"After Updating References",
_verify_remembered_disable, // do not verify remembered set
_verify_forwarded_none, // no forwarded references
@ -1163,8 +1144,9 @@ void ShenandoahVerifier::verify_after_update_refs() {
);
}
void ShenandoahVerifier::verify_after_degenerated() {
void ShenandoahVerifier::verify_after_degenerated(ShenandoahGeneration* generation) {
verify_at_safepoint(
generation,
"After Degenerated GC",
_verify_remembered_disable, // do not verify remembered set
_verify_forwarded_none, // all objects are non-forwarded
@ -1177,8 +1159,9 @@ void ShenandoahVerifier::verify_after_degenerated() {
);
}
void ShenandoahVerifier::verify_before_fullgc() {
void ShenandoahVerifier::verify_before_fullgc(ShenandoahGeneration* generation) {
verify_at_safepoint(
generation,
"Before Full GC",
_verify_remembered_disable, // do not verify remembered set
_verify_forwarded_allow, // can have forwarded objects
@ -1191,8 +1174,9 @@ void ShenandoahVerifier::verify_before_fullgc() {
);
}
void ShenandoahVerifier::verify_after_fullgc() {
void ShenandoahVerifier::verify_after_fullgc(ShenandoahGeneration* generation) {
verify_at_safepoint(
generation,
"After Full GC",
_verify_remembered_after_full_gc, // verify read-write remembered set
_verify_forwarded_none, // all objects are non-forwarded
@ -1257,14 +1241,14 @@ public:
void do_oop(oop* p) override { do_oop_work(p); }
};
void ShenandoahVerifier::verify_roots_in_to_space() {
void ShenandoahVerifier::verify_roots_in_to_space(ShenandoahGeneration* generation) {
ShenandoahVerifyInToSpaceClosure cl;
ShenandoahRootVerifier::roots_do(&cl);
ShenandoahRootVerifier::roots_do(&cl, generation);
}
void ShenandoahVerifier::verify_roots_no_forwarded() {
void ShenandoahVerifier::verify_roots_no_forwarded(ShenandoahGeneration* generation) {
ShenandoahVerifyNoForwarded cl;
ShenandoahRootVerifier::roots_do(&cl);
ShenandoahRootVerifier::roots_do(&cl, generation);
}
template<typename Scanner>
@ -1300,7 +1284,6 @@ public:
template<typename Scanner>
void ShenandoahVerifier::help_verify_region_rem_set(Scanner* scanner, ShenandoahHeapRegion* r,
HeapWord* registration_watermark, const char* message) {
shenandoah_assert_generations_reconciled();
ShenandoahOldGeneration* old_gen = _heap->old_generation();
assert(old_gen->is_mark_complete() || old_gen->is_parsable(), "Sanity");

View File

@ -196,7 +196,8 @@ public:
};
private:
void verify_at_safepoint(const char* label,
void verify_at_safepoint(ShenandoahGeneration* generation,
const char* label,
VerifyRememberedSet remembered,
VerifyForwarded forwarded,
VerifyMarked marked,
@ -210,20 +211,20 @@ public:
ShenandoahVerifier(ShenandoahHeap* heap, MarkBitMap* verification_bitmap) :
_heap(heap), _verification_bit_map(verification_bitmap) {};
void verify_before_concmark();
void verify_after_concmark();
void verify_after_concmark_with_promotions();
void verify_before_evacuation();
void verify_before_update_refs();
void verify_after_update_refs();
void verify_before_fullgc();
void verify_after_fullgc();
void verify_after_degenerated();
void verify_generic(VerifyOption option);
void verify_before_concmark(ShenandoahGeneration* generation);
void verify_after_concmark(ShenandoahGeneration* generation);
void verify_after_concmark_with_promotions(ShenandoahGeneration* generation);
void verify_before_evacuation(ShenandoahGeneration* generation);
void verify_before_update_refs(ShenandoahGeneration* generation);
void verify_after_update_refs(ShenandoahGeneration* generation);
void verify_before_fullgc(ShenandoahGeneration* generation);
void verify_after_fullgc(ShenandoahGeneration* generation);
void verify_after_degenerated(ShenandoahGeneration* generation);
void verify_generic(ShenandoahGeneration* generation, VerifyOption option);
// Roots should only contain to-space oops
void verify_roots_in_to_space();
void verify_roots_no_forwarded();
void verify_roots_in_to_space(ShenandoahGeneration* generation);
void verify_roots_no_forwarded(ShenandoahGeneration* generation);
// Check that generation usages are accurate before rebuilding free set
void verify_before_rebuilding_free_set();