8332448: Make SpaceMangler inherit AllStatic

Reviewed-by: kbarrett, iwalulya
Albert Mingkun Yang 2024-05-21 07:43:54 +00:00
parent 8a49d47cf3
commit 5f2b8d0224
26 changed files with 36 additions and 515 deletions
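For orientation, the shape of the change: the old SpaceMangler was a stateful, per-space helper (a CHeapObj holding a _top_for_allocations high-water mark, with mangle_unused_area, mangle_unused_area_complete and the check_mangled_* verification hooks), reached through GenSpaceMangler/MutableSpaceMangler wrappers owned by each space. After this commit it is a stateless AllStatic utility exposing only mangle_region, and each space mangles its unused area directly. A condensed sketch, using simplified stand-in types rather than the real HotSpot headers (HeapWord, MemRegion, AllStatic and the badHeapWord fill value are approximations here, not the actual definitions):

// Illustrative sketch only; the types below are stand-ins, not HotSpot's.
#include <algorithm>
#include <cstddef>
#include <cstdint>

using HeapWord = uintptr_t;
struct MemRegion { HeapWord* start; size_t word_size; };
struct AllStatic { AllStatic() = delete; };   // stand-in: no instances allowed

// Before (removed by this commit): a stateful per-space mangler.
//   class SpaceMangler : public CHeapObj<mtGC> {
//     HeapWord* _top_for_allocations;                  // high-water mark
//     void mangle_unused_area();                       // mangle [top, top_for_allocations)
//     void mangle_unused_area_complete();              // mangle [top, end)
//     void check_mangled_unused_area(HeapWord* limit); // sparse verification
//     ...
//   };

// After: one stateless, debug-only helper; callers pass the exact region,
// e.g. MutableSpace::mangle_unused_area() now does
//   mangle_region(MemRegion(_top, _end));
struct SpaceMangler : AllStatic {
  static void mangle_region(MemRegion mr) {
#ifdef ASSERT
    // Fill the region with a recognizable "bad" pattern (badHeapWord in HotSpot).
    std::fill_n(mr.start, mr.word_size, HeapWord(0xBAADBABE));
#endif
  }
};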

View File

@ -83,18 +83,7 @@ void MutableNUMASpace::mangle_unused_area() {
void MutableNUMASpace::mangle_region(MemRegion mr) {
// This method should do nothing because numa spaces are not mangled.
}
void MutableNUMASpace::set_top_for_allocations(HeapWord* v) {
assert(false, "Do not mangle MutableNUMASpace's");
}
void MutableNUMASpace::set_top_for_allocations() {
// This method should do nothing.
}
void MutableNUMASpace::check_mangled_unused_area(HeapWord* limit) {
// This method should do nothing.
}
void MutableNUMASpace::check_mangled_unused_area_complete() {
// This method should do nothing.
}
#endif // NOT_PRODUCT
// There may be unallocated holes in the middle chunks

View File

@ -175,10 +175,6 @@ public:
virtual void mangle_unused_area() PRODUCT_RETURN;
virtual void mangle_region(MemRegion mr) PRODUCT_RETURN;
virtual void check_mangled_unused_area(HeapWord* limit) PRODUCT_RETURN;
virtual void check_mangled_unused_area_complete() PRODUCT_RETURN;
virtual void set_top_for_allocations(HeapWord* v) PRODUCT_RETURN;
virtual void set_top_for_allocations() PRODUCT_RETURN;
virtual void ensure_parsability();
virtual size_t used_in_words() const;

View File

@ -25,7 +25,6 @@
#include "precompiled.hpp"
#include "gc/parallel/mutableSpace.hpp"
#include "gc/shared/pretouchTask.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"
@ -36,7 +35,6 @@
#include "utilities/macros.hpp"
MutableSpace::MutableSpace(size_t alignment) :
_mangler(nullptr),
_last_setup_region(),
_alignment(alignment),
_bottom(nullptr),
@ -45,11 +43,6 @@ MutableSpace::MutableSpace(size_t alignment) :
{
assert(MutableSpace::alignment() % os::vm_page_size() == 0,
"Space should be aligned");
_mangler = new MutableSpaceMangler(this);
}
MutableSpace::~MutableSpace() {
delete _mangler;
}
void MutableSpace::numa_setup_pages(MemRegion mr, size_t page_size, bool clear_space) {
@ -152,32 +145,15 @@ void MutableSpace::clear(bool mangle_space) {
}
#ifndef PRODUCT
void MutableSpace::check_mangled_unused_area(HeapWord* limit) {
mangler()->check_mangled_unused_area(limit);
}
void MutableSpace::check_mangled_unused_area_complete() {
mangler()->check_mangled_unused_area_complete();
}
// Mangle only the unused space that has not previously
// been mangled and that has not been allocated since being
// mangled.
void MutableSpace::mangle_unused_area() {
mangler()->mangle_unused_area();
mangle_region(MemRegion(_top, _end));
}
void MutableSpace::mangle_region(MemRegion mr) {
SpaceMangler::mangle_region(mr);
}
void MutableSpace::set_top_for_allocations(HeapWord* v) {
mangler()->set_top_for_allocations(v);
}
void MutableSpace::set_top_for_allocations() {
mangler()->set_top_for_allocations(top());
}
#endif
HeapWord* MutableSpace::cas_allocate(size_t size) {

View File

@ -46,13 +46,9 @@ class WorkerThreads;
// Invariant: bottom() <= top() <= end()
// top() and end() are exclusive.
class MutableSpaceMangler;
class MutableSpace: public CHeapObj<mtGC> {
friend class VMStructs;
// Helper for mangling unused space in debug builds
MutableSpaceMangler* _mangler;
// The last region which page had been setup to be interleaved.
MemRegion _last_setup_region;
size_t _alignment;
@ -60,15 +56,13 @@ class MutableSpace: public CHeapObj<mtGC> {
HeapWord* volatile _top;
HeapWord* _end;
MutableSpaceMangler* mangler() { return _mangler; }
void numa_setup_pages(MemRegion mr, size_t page_size, bool clear_space);
void set_last_setup_region(MemRegion mr) { _last_setup_region = mr; }
MemRegion last_setup_region() const { return _last_setup_region; }
public:
virtual ~MutableSpace();
virtual ~MutableSpace() = default;
MutableSpace(size_t page_size);
// Accessors
@ -107,20 +101,11 @@ class MutableSpace: public CHeapObj<mtGC> {
virtual void update() { }
virtual void accumulate_statistics() { }
// Methods used in mangling. See descriptions under SpaceMangler.
virtual void mangle_unused_area() PRODUCT_RETURN;
virtual void check_mangled_unused_area(HeapWord* limit) PRODUCT_RETURN;
virtual void check_mangled_unused_area_complete() PRODUCT_RETURN;
virtual void set_top_for_allocations(HeapWord* v) PRODUCT_RETURN;
// Used to save the space's current top for later use during mangling.
virtual void set_top_for_allocations() PRODUCT_RETURN;
virtual void mangle_region(MemRegion mr) PRODUCT_RETURN;
virtual void ensure_parsability() { }
virtual void mangle_region(MemRegion mr) PRODUCT_RETURN;
// Boolean queries.
bool is_empty() const { return used_in_words() == 0; }
bool not_empty() const { return used_in_words() > 0; }

View File

@ -841,24 +841,6 @@ void ParallelScavengeHeap::complete_loaded_archive_space(MemRegion archive_space
_old_gen->complete_loaded_archive_space(archive_space);
}
#ifndef PRODUCT
void ParallelScavengeHeap::record_gen_tops_before_GC() {
if (ZapUnusedHeapArea) {
young_gen()->record_spaces_top();
old_gen()->record_spaces_top();
}
}
void ParallelScavengeHeap::gen_mangle_unused_area() {
if (ZapUnusedHeapArea) {
young_gen()->eden_space()->mangle_unused_area();
young_gen()->to_space()->mangle_unused_area();
young_gen()->from_space()->mangle_unused_area();
old_gen()->object_space()->mangle_unused_area();
}
}
#endif
void ParallelScavengeHeap::register_nmethod(nmethod* nm) {
ScavengableNMethods::register_nmethod(nm);
}

View File

@ -243,12 +243,6 @@ class ParallelScavengeHeap : public CollectedHeap {
// generation may be expanded in preparation for the resize.
void resize_old_gen(size_t desired_free_space);
// Save the tops of the spaces in all generations
void record_gen_tops_before_GC() PRODUCT_RETURN;
// Mangle the unused parts of all spaces in the heap
void gen_mangle_unused_area() PRODUCT_RETURN;
GCMemoryManager* old_gc_manager() const { return _old_manager; }
GCMemoryManager* young_gc_manager() const { return _young_manager; }

View File

@ -31,7 +31,6 @@
#include "gc/parallel/psOldGen.hpp"
#include "gc/shared/cardTableBarrierSet.hpp"
#include "gc/shared/gcLocker.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "logging/log.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/java.hpp"
@ -376,10 +375,3 @@ void PSOldGen::update_counters() {
void PSOldGen::verify() {
object_space()->verify();
}
#ifndef PRODUCT
void PSOldGen::record_spaces_top() {
assert(ZapUnusedHeapArea, "Not mangling unused space");
object_space()->set_top_for_allocations();
}
#endif

View File

@ -156,9 +156,6 @@ class PSOldGen : public CHeapObj<mtGC> {
// Printing support
const char* name() const { return "ParOldGen"; }
// Debugging support
// Save the tops of all spaces for later use during mangling.
void record_spaces_top() PRODUCT_RETURN;
};
#endif // SHARE_GC_PARALLEL_PSOLDGEN_HPP

View File

@ -57,7 +57,6 @@
#include "gc/shared/referencePolicy.hpp"
#include "gc/shared/referenceProcessor.hpp"
#include "gc/shared/referenceProcessorPhaseTimes.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "gc/shared/strongRootsScope.hpp"
#include "gc/shared/taskTerminator.hpp"
#include "gc/shared/weakProcessor.inline.hpp"
@ -963,8 +962,16 @@ void PSParallelCompact::post_compact()
for (unsigned int id = old_space_id; id < last_space_id; ++id) {
// Clear the marking bitmap, summary data and split info.
clear_data_covering_space(SpaceId(id));
// Update top(). Must be done after clearing the bitmap and summary data.
_space_info[id].publish_new_top();
{
MutableSpace* space = _space_info[id].space();
HeapWord* top = space->top();
HeapWord* new_top = _space_info[id].new_top();
if (ZapUnusedHeapArea && new_top < top) {
space->mangle_region(MemRegion(new_top, top));
}
// Update top(). Must be done after clearing the bitmap and summary data.
space->set_top(new_top);
}
}
ParCompactionManager::flush_all_string_dedup_requests();
@ -1007,10 +1014,6 @@ void PSParallelCompact::post_compact()
DerivedPointerTable::update_pointers();
#endif
if (ZapUnusedHeapArea) {
heap->gen_mangle_unused_area();
}
// Signal that we have completed a visit to all live objects.
Universe::heap()->record_whole_heap_examined_timestamp();
}
@ -1307,11 +1310,6 @@ bool PSParallelCompact::invoke_no_policy(bool maximum_heap_compaction) {
ClearedAllSoftRefs casr(maximum_heap_compaction,
heap->soft_ref_policy());
if (ZapUnusedHeapArea) {
// Save information needed to minimize mangling
heap->record_gen_tops_before_GC();
}
// Make sure data structures are sane, make the heap parsable, and do other
// miscellaneous bookkeeping.
pre_compact();
@ -1469,10 +1467,6 @@ bool PSParallelCompact::invoke_no_policy(bool maximum_heap_compaction) {
Universe::verify("After GC");
}
if (ZapUnusedHeapArea) {
old_gen->object_space()->check_mangled_unused_area_complete();
}
heap->print_heap_after_gc();
heap->trace_heap_after_gc(&_gc_tracer);

View File

@ -50,7 +50,6 @@
#include "gc/shared/referenceProcessor.hpp"
#include "gc/shared/referenceProcessorPhaseTimes.hpp"
#include "gc/shared/scavengableNMethods.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "gc/shared/strongRootsScope.hpp"
#include "gc/shared/taskTerminator.hpp"
#include "gc/shared/weakProcessor.inline.hpp"

View File

@ -29,7 +29,6 @@
#include "gc/parallel/psYoungGen.hpp"
#include "gc/shared/gcUtil.hpp"
#include "gc/shared/genArguments.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "logging/log.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/java.hpp"
@ -622,20 +621,6 @@ void PSYoungGen::resize_spaces(size_t requested_eden_size,
mangle_survivors(to_space(), toMR, from_space(), fromMR);
}
}
// If not mangling the spaces, do some checking to verify that
// the spaces are already mangled.
// The spaces should be correctly mangled at this point so
// do some checking here. Note that they are not being mangled
// in the calls to initialize().
// Must check mangling before the spaces are reshaped. Otherwise,
// the bottom or end of one space may have moved into an area
// covered by another space and a failure of the check may
// not correctly indicate which space is not properly mangled.
HeapWord* limit = (HeapWord*) virtual_space()->high();
eden_space()->check_mangled_unused_area(limit);
from_space()->check_mangled_unused_area(limit);
to_space()->check_mangled_unused_area(limit);
}
WorkerThreads* workers = &ParallelScavengeHeap::heap()->workers();
@ -833,12 +818,3 @@ void PSYoungGen::verify() {
from_space()->verify();
to_space()->verify();
}
#ifndef PRODUCT
void PSYoungGen::record_spaces_top() {
assert(ZapUnusedHeapArea, "Not mangling unused space");
eden_space()->set_top_for_allocations();
from_space()->set_top_for_allocations();
to_space()->set_top_for_allocations();
}
#endif

View File

@ -152,8 +152,6 @@ class PSYoungGen : public CHeapObj<mtGC> {
MemRegion s1MR,
MutableSpace* s2,
MemRegion s2MR) PRODUCT_RETURN;
void record_spaces_top() PRODUCT_RETURN;
};
#endif // SHARE_GC_PARALLEL_PSYOUNGGEN_HPP

View File

@ -43,7 +43,7 @@
#include "gc/shared/referencePolicy.hpp"
#include "gc/shared/referenceProcessorPhaseTimes.hpp"
#include "gc/shared/space.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "gc/shared/spaceDecorator.hpp"
#include "gc/shared/strongRootsScope.hpp"
#include "gc/shared/weakProcessor.hpp"
#include "logging/log.hpp"
@ -331,21 +331,6 @@ void DefNewGeneration::compute_space_boundaries(uintx minimum_eden_size,
// newly formed eden.
bool live_in_eden = minimum_eden_size > 0;
// If not clearing the spaces, do some checking to verify that
// the space are already mangled.
if (!clear_space) {
// Must check mangling before the spaces are reshaped. Otherwise,
// the bottom or end of one space may have moved into another
// a failure of the check may not correctly indicate which space
// is not properly mangled.
if (ZapUnusedHeapArea) {
HeapWord* limit = (HeapWord*) _virtual_space.high();
eden()->check_mangled_unused_area(limit);
from()->check_mangled_unused_area(limit);
to()->check_mangled_unused_area(limit);
}
}
// Reset the spaces for their new regions.
eden()->initialize(edenMR,
clear_space && !live_in_eden,
@ -717,16 +702,6 @@ bool DefNewGeneration::collect(bool clear_all_soft_refs) {
// Swap the survivor spaces.
eden()->clear(SpaceDecorator::Mangle);
from()->clear(SpaceDecorator::Mangle);
if (ZapUnusedHeapArea) {
// This is now done here because of the piece-meal mangling which
// can check for valid mangling at intermediate points in the
// collection(s). When a young collection fails to collect
// sufficient space resizing of the young generation can occur
// an redistribute the spaces in the young generation. Mangle
// here so that unzapped regions don't get distributed to
// other spaces.
to()->mangle_unused_area();
}
swap_spaces();
assert(to()->is_empty(), "to space should be empty now");
@ -892,7 +867,7 @@ void DefNewGeneration::reset_scratch() {
// to_space if ZapUnusedHeapArea. This is needed because
// top is not maintained while using to-space as scratch.
if (ZapUnusedHeapArea) {
to()->mangle_unused_area_complete();
to()->mangle_unused_area();
}
}
@ -950,24 +925,11 @@ void DefNewGeneration::gc_epilogue(bool full) {
#endif // ASSERT
}
if (ZapUnusedHeapArea) {
eden()->check_mangled_unused_area_complete();
from()->check_mangled_unused_area_complete();
to()->check_mangled_unused_area_complete();
}
// update the generation and space performance counters
update_counters();
gch->counters()->update_counters();
}
void DefNewGeneration::record_spaces_top() {
assert(ZapUnusedHeapArea, "Not mangling unused space");
eden()->set_top_for_allocations();
to()->set_top_for_allocations();
from()->set_top_for_allocations();
}
void DefNewGeneration::update_counters() {
if (UsePerfData) {
_eden_counters->update_all();

View File

@ -232,9 +232,6 @@ class DefNewGeneration: public Generation {
void gc_epilogue(bool full);
// Save the tops for eden, from, and to
void record_spaces_top();
// For Old collection (part of running Full GC), the DefNewGeneration can
// contribute the free part of "to-space" as the scratch space.
void contribute_scratch(void*& scratch, size_t& num_words);

View File

@ -31,7 +31,7 @@
#include "gc/shared/gcTimer.hpp"
#include "gc/shared/gcTrace.hpp"
#include "gc/shared/space.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "gc/shared/spaceDecorator.hpp"
#include "logging/log.hpp"
#include "memory/allocation.inline.hpp"
#include "oops/oop.inline.hpp"

View File

@ -366,9 +366,10 @@ public:
}
// Reset top and unused memory
space->set_top(get_compaction_top(i));
if (ZapUnusedHeapArea) {
space->mangle_unused_area();
HeapWord* new_top = get_compaction_top(i);
space->set_top(new_top);
if (ZapUnusedHeapArea && new_top < top) {
space->mangle_unused_area(MemRegion(new_top, top));
}
}
}

View File

@ -453,7 +453,6 @@ bool SerialHeap::do_young_collection(bool clear_soft_refs) {
print_heap_before_gc();
const PreGenGCValues pre_gc_values = get_pre_gc_values();
record_gen_tops_before_GC();
increment_total_collections(false);
const bool should_verify = total_collections() >= VerifyGCStartAt;
if (should_verify && VerifyBeforeGC) {
@ -956,12 +955,3 @@ void SerialHeap::gc_epilogue(bool full) {
MetaspaceCounters::update_performance_counters();
};
#ifndef PRODUCT
void SerialHeap::record_gen_tops_before_GC() {
if (ZapUnusedHeapArea) {
_young_gen->record_spaces_top();
_old_gen->record_spaces_top();
}
}
#endif // not PRODUCT

View File

@ -294,9 +294,6 @@ private:
HeapWord* mem_allocate_work(size_t size,
bool is_tlab);
// Save the tops of the spaces in all generations
void record_gen_tops_before_GC() PRODUCT_RETURN;
private:
MemoryPool* _eden_pool;
MemoryPool* _survivor_pool;

View File

@ -55,8 +55,7 @@ bool TenuredGeneration::grow_by(size_t bytes) {
// Fix for bug #4668531
if (ZapUnusedHeapArea) {
MemRegion mangle_region(space()->end(),
(HeapWord*)_virtual_space.high());
MemRegion mangle_region(space()->end(), (HeapWord*)_virtual_space.high());
SpaceMangler::mangle_region(mangle_region);
}
@ -485,14 +484,6 @@ void TenuredGeneration::complete_loaded_archive_space(MemRegion archive_space) {
void TenuredGeneration::gc_epilogue() {
// update the generation and space performance counters
update_counters();
if (ZapUnusedHeapArea) {
_the_space->check_mangled_unused_area_complete();
}
}
void TenuredGeneration::record_spaces_top() {
assert(ZapUnusedHeapArea, "Not mangling unused space");
_the_space->set_top_for_allocations();
}
void TenuredGeneration::verify() {

View File

@ -156,8 +156,6 @@ public:
// Performance Counter support
void update_counters();
void record_spaces_top();
// Statistics
void update_gc_stats(Generation* current_generation, bool full);

View File

@ -27,7 +27,7 @@
#include "classfile/vmSymbols.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "gc/shared/space.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "gc/shared/spaceDecorator.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"
@ -42,13 +42,7 @@
ContiguousSpace::ContiguousSpace():
_bottom(nullptr),
_end(nullptr),
_top(nullptr) {
_mangler = new GenSpaceMangler(this);
}
ContiguousSpace::~ContiguousSpace() {
delete _mangler;
}
_top(nullptr) {}
void ContiguousSpace::initialize(MemRegion mr,
bool clear_space,
@ -73,26 +67,14 @@ void ContiguousSpace::clear(bool mangle_space) {
#ifndef PRODUCT
void ContiguousSpace::set_top_for_allocations() {
mangler()->set_top_for_allocations(top());
}
void ContiguousSpace::check_mangled_unused_area(HeapWord* limit) {
mangler()->check_mangled_unused_area(limit);
}
void ContiguousSpace::check_mangled_unused_area_complete() {
mangler()->check_mangled_unused_area_complete();
}
// Mangled only the unused space that has not previously
// been mangled and that has not been allocated since being
// mangled.
void ContiguousSpace::mangle_unused_area() {
mangler()->mangle_unused_area();
mangle_unused_area(MemRegion(_top, _end));
}
void ContiguousSpace::mangle_unused_area_complete() {
mangler()->mangle_unused_area_complete();
void ContiguousSpace::mangle_unused_area(MemRegion mr) {
SpaceMangler::mangle_region(mr);
}
#endif // NOT_PRODUCT
void ContiguousSpace::print() const { print_on(tty); }

View File

@ -41,9 +41,6 @@
// implementations for keeping track of free and used space,
// for iterating over objects and free blocks, etc.
// Forward decls.
class GenSpaceMangler;
// A space in which the free area is contiguous. It therefore supports
// faster allocation, and compaction.
//
@ -57,10 +54,6 @@ private:
HeapWord* _bottom;
HeapWord* _end;
HeapWord* _top;
// A helper for mangling the unused area of the space in debug builds.
GenSpaceMangler* _mangler;
GenSpaceMangler* mangler() { return _mangler; }
// Allocation helpers (return null if full).
inline HeapWord* allocate_impl(size_t word_size);
@ -68,7 +61,6 @@ private:
public:
ContiguousSpace();
~ContiguousSpace();
// Accessors
HeapWord* bottom() const { return _bottom; }
@ -115,24 +107,8 @@ public:
// had allocation performed in it, but is now to be considered empty.
void clear(bool mangle_space);
// Used to save the space's current top for later use during mangling.
void set_top_for_allocations() PRODUCT_RETURN;
// For detecting GC bugs. Should only be called at GC boundaries, since
// some unused space may be used as scratch space during GC's.
// We also call this when expanding a space to satisfy an allocation
// request. See bug #4668531
// Mangle regions in the space from the current top up to the
// previously mangled part of the space.
void mangle_unused_area() PRODUCT_RETURN;
// Mangle [top, end)
void mangle_unused_area_complete() PRODUCT_RETURN;
// Do some sparse checking on the area that should have been mangled.
void check_mangled_unused_area(HeapWord* limit) PRODUCT_RETURN;
// Check the complete area that should have been mangled.
// This code may be null depending on the macro DEBUG_MANGLING.
void check_mangled_unused_area_complete() PRODUCT_RETURN;
void mangle_unused_area(MemRegion mr) PRODUCT_RETURN;
MemRegion used_region() const { return MemRegion(bottom(), top()); }

View File

@ -23,117 +23,15 @@
*/
#include "precompiled.hpp"
#include "gc/parallel/mutableSpace.hpp"
#include "gc/shared/space.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "logging/log.hpp"
#include "gc/shared/spaceDecorator.hpp"
#include "utilities/copy.hpp"
// Catch-all file for utility classes
#ifndef PRODUCT
// Returns true is the location q matches the mangling
// pattern.
bool SpaceMangler::is_mangled(HeapWord* q) {
// This test loses precision but is good enough
return badHeapWord == (max_juint & reinterpret_cast<uintptr_t>(*q));
}
void SpaceMangler::set_top_for_allocations(HeapWord* v) {
if (v < end()) {
assert(!CheckZapUnusedHeapArea || is_mangled(v),
"The high water mark is not mangled");
}
_top_for_allocations = v;
}
// Mangle only the unused space that has not previously
// been mangled and that has not been allocated since being
// mangled.
void SpaceMangler::mangle_unused_area() {
assert(ZapUnusedHeapArea, "Mangling should not be in use");
// Mangle between top and the high water mark. Safeguard
// against the space changing since top_for_allocations was
// set.
HeapWord* mangled_end = MIN2(top_for_allocations(), end());
if (top() < mangled_end) {
MemRegion mangle_mr(top(), mangled_end);
SpaceMangler::mangle_region(mangle_mr);
// Light weight check of mangling.
check_mangled_unused_area(end());
}
// Complete check of unused area which is functional when
// DEBUG_MANGLING is defined.
check_mangled_unused_area_complete();
}
// A complete mangle is expected in the
// exceptional case where top_for_allocations is not
// properly tracking the high water mark for mangling.
// This can be the case when to-space is being used for
// scratch space during a mark-sweep-compact. See
// contribute_scratch().
void SpaceMangler::mangle_unused_area_complete() {
assert(ZapUnusedHeapArea, "Mangling should not be in use");
MemRegion mangle_mr(top(), end());
SpaceMangler::mangle_region(mangle_mr);
}
#ifdef ASSERT
// Simply mangle the MemRegion mr.
void SpaceMangler::mangle_region(MemRegion mr) {
assert(ZapUnusedHeapArea, "Mangling should not be in use");
#ifdef ASSERT
Copy::fill_to_words(mr.start(), mr.word_size(), badHeapWord);
#endif
}
// Check that top, top_for_allocations and the last
// word of the space are mangled. In a tight memory
// situation even this light weight mangling could
// cause paging by touching the end of the space.
void SpaceMangler::check_mangled_unused_area(HeapWord* limit) {
if (CheckZapUnusedHeapArea) {
// This method can be called while the spaces are
// being reshaped so skip the test if the end of the
// space is beyond the specified limit;
if (end() > limit) return;
assert(top() == end() ||
(is_mangled(top())), "Top not mangled");
assert((top_for_allocations() < top()) ||
(top_for_allocations() >= end()) ||
(is_mangled(top_for_allocations())),
"Older unused not mangled");
assert(top() == end() ||
(is_mangled(end() - 1)), "End not properly mangled");
// Only does checking when DEBUG_MANGLING is defined.
check_mangled_unused_area_complete();
}
}
#undef DEBUG_MANGLING
// This should only be used while debugging the mangling
// because of the high cost of checking the completeness.
void SpaceMangler::check_mangled_unused_area_complete() {
if (CheckZapUnusedHeapArea) {
assert(ZapUnusedHeapArea, "Not mangling unused area");
#ifdef DEBUG_MANGLING
HeapWord* q = top();
HeapWord* limit = end();
bool passed = true;
while (q < limit) {
if (!is_mangled(q)) {
passed = false;
break;
}
q++;
}
assert(passed, "Mangling is not complete");
#endif
}
}
#undef DEBUG_MANGLING
#endif // not PRODUCT
#endif // ASSERT

View File

@ -29,7 +29,7 @@
#include "memory/memRegion.hpp"
#include "utilities/globalDefinitions.hpp"
class SpaceDecorator: public AllStatic {
class SpaceDecorator : AllStatic {
public:
// Initialization flags.
static const bool Clear = true;
@ -38,114 +38,8 @@ class SpaceDecorator: public AllStatic {
static const bool DontMangle = false;
};
// Functionality for use with class Space and class MutableSpace.
// The approach taken with the mangling is to mangle all
// the space initially and then to mangle areas that have
// been allocated since the last collection. Mangling is
// done in the context of a generation and in the context
// of a space.
// The space in a generation is mangled when it is first
// initialized and when the generation grows. The spaces
// are not necessarily up-to-date when this mangling occurs
// and the method mangle_region() is used.
// After allocations have been done in a space, the space generally
// need to be remangled. Remangling is only done on the
// recently allocated regions in the space. Typically, that is
// the region between the new top and the top just before a
// garbage collection.
// An exception to the usual mangling in a space is done when the
// space is used for an extraordinary purpose. Specifically, when
// to-space is used as scratch space for a mark-sweep-compact
// collection.
// Spaces are mangled after a collection. If the generation
// grows after a collection, the added space is mangled as part of
// the growth of the generation. No additional mangling is needed when the
// spaces are resized after an expansion.
// The class SpaceMangler keeps a pointer to the top of the allocated
// area and provides the methods for doing the piece meal mangling.
// Methods for doing spaces and full checking of the mangling are
// included. The full checking is done if DEBUG_MANGLING is defined.
// GenSpaceMangler is used with the SerialHeap collectors and
// MutableSpaceMangler is used with the ParallelScavengeHeap collectors.
// These subclasses abstract the differences in the types of spaces used
// by each heap.
class SpaceMangler: public CHeapObj<mtGC> {
friend class VMStructs;
// High water mark for allocations. Typically, the space above
// this point have been mangle previously and don't need to be
// touched again. Space below this point has been allocated
// and remangling is needed between the current top and this
// high water mark.
HeapWord* _top_for_allocations;
HeapWord* top_for_allocations() { return _top_for_allocations; }
public:
// Setting _top_for_allocations to null at initialization
// makes it always below top so that mangling done as part
// of the initialize() call of a space does nothing (as it
// should since the mangling is done as part of the constructor
// for the space.
SpaceMangler() : _top_for_allocations(nullptr) {}
// Methods for top and end that delegate to the specific
// space type.
virtual HeapWord* top() const = 0;
virtual HeapWord* end() const = 0;
// Return true if q matches the mangled pattern.
static bool is_mangled(HeapWord* q) PRODUCT_RETURN0;
// Used to save the address in a space for later use during mangling.
void set_top_for_allocations(HeapWord* v);
// Overwrites the unused portion of this space.
// Mangle only the region not previously mangled [top, top_previously_mangled)
void mangle_unused_area();
// Mangle all the unused region [top, end)
void mangle_unused_area_complete();
// Do some sparse checking on the area that should have been mangled.
void check_mangled_unused_area(HeapWord* limit) PRODUCT_RETURN;
// Do a complete check of the area that should be mangled.
void check_mangled_unused_area_complete() PRODUCT_RETURN;
// Mangle the MemRegion. This is a non-space specific mangler. It
// is used during the initial mangling of a space before the space
// is fully constructed. Also is used when a generation is expanded
// and possibly before the spaces have been reshaped to to the new
// size of the generation.
static void mangle_region(MemRegion mr) PRODUCT_RETURN;
};
class ContiguousSpace;
class MutableSpace;
// For use with SerialHeap's
class GenSpaceMangler: public SpaceMangler {
ContiguousSpace* _sp;
ContiguousSpace* sp() { return _sp; }
HeapWord* top() const;
HeapWord* end() const;
public:
GenSpaceMangler(ContiguousSpace* sp) : SpaceMangler(), _sp(sp) {}
};
// For use with ParallelScavengeHeap's.
class MutableSpaceMangler: public SpaceMangler {
MutableSpace* _sp;
MutableSpace* sp() { return _sp; }
HeapWord* top() const;
HeapWord* end() const;
public:
MutableSpaceMangler(MutableSpace* sp) : SpaceMangler(), _sp(sp) {}
struct SpaceMangler : AllStatic {
static void mangle_region(MemRegion mr) NOT_DEBUG_RETURN;
};
#endif // SHARE_GC_SHARED_SPACEDECORATOR_HPP
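Note also the switch in this header from PRODUCT_RETURN to NOT_DEBUG_RETURN on mangle_region: the mangling body (under #ifdef ASSERT in spaceDecorator.cpp above) is now compiled only into debug builds rather than into all non-product builds. A minimal sketch of that declaration-macro idiom, simplified from what utilities/macros.hpp does (the exact HotSpot definitions may differ in detail):

// Simplified sketch of the NOT_DEBUG_RETURN idiom; not HotSpot's exact macro text.
struct MemRegion { };                 // stand-in type for the sketch

#ifdef ASSERT
  #define NOT_DEBUG_RETURN            // expands to nothing: the line stays a pure declaration
#else
  #define NOT_DEBUG_RETURN {}         // expands to an empty inline body
#endif

struct SpaceMangler {
  // Debug (ASSERT) builds: declaration only, with the real body provided in
  // spaceDecorator.cpp. Other builds: an empty inline function, so calls to
  // SpaceMangler::mangle_region(mr) compile to nothing.
  static void mangle_region(MemRegion mr) NOT_DEBUG_RETURN;
};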

View File

@ -1,40 +0,0 @@
/*
* Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_GC_SHARED_SPACEDECORATOR_INLINE_HPP
#define SHARE_GC_SHARED_SPACEDECORATOR_INLINE_HPP
#include "gc/shared/spaceDecorator.hpp"
#include "gc/parallel/mutableSpace.hpp"
#include "gc/shared/space.hpp"
#include "utilities/globalDefinitions.hpp"
inline HeapWord* GenSpaceMangler::top() const { return _sp->top(); }
inline HeapWord* GenSpaceMangler::end() const { return _sp->end(); }
inline HeapWord* MutableSpaceMangler::top() const { return _sp->top(); }
inline HeapWord* MutableSpaceMangler::end() const { return _sp->end(); }
#endif // SHARE_GC_SHARED_SPACEDECORATOR_INLINE_HPP

View File

@ -473,9 +473,6 @@ const int ObjectAlignmentInBytes = 8;
develop(bool, ZapUnusedHeapArea, trueInDebug, \
"Zap unused heap space") \
\
develop(bool, CheckZapUnusedHeapArea, false, \
"Check zapping of unused heap space") \
\
develop(bool, ZapFillerObjects, trueInDebug, \
"Zap filler objects") \
\