8241583: Shenandoah: turn heap lock asserts into macros
Reviewed-by: rkennke
parent d1b506597f
commit 7fc31eadf3
@@ -376,3 +376,40 @@ void ShenandoahAsserts::assert_locked_or_shenandoah_safepoint(Mutex* lock, const
   ShenandoahMessageBuffer msg("Must ba at a Shenandoah safepoint or held %s lock", lock->name());
   report_vm_error(file, line, msg.buffer());
 }
+
+void ShenandoahAsserts::assert_heaplocked(const char* file, int line) {
+  ShenandoahHeap* heap = ShenandoahHeap::heap();
+
+  if (heap->lock()->owned_by_self()) {
+    return;
+  }
+
+  ShenandoahMessageBuffer msg("Heap lock must be owned by current thread");
+  report_vm_error(file, line, msg.buffer());
+}
+
+void ShenandoahAsserts::assert_not_heaplocked(const char* file, int line) {
+  ShenandoahHeap* heap = ShenandoahHeap::heap();
+
+  if (!heap->lock()->owned_by_self()) {
+    return;
+  }
+
+  ShenandoahMessageBuffer msg("Heap lock must not be owned by current thread");
+  report_vm_error(file, line, msg.buffer());
+}
+
+void ShenandoahAsserts::assert_heaplocked_or_safepoint(const char* file, int line) {
+  ShenandoahHeap* heap = ShenandoahHeap::heap();
+
+  if (heap->lock()->owned_by_self()) {
+    return;
+  }
+
+  if (ShenandoahSafepoint::is_at_shenandoah_safepoint() && Thread::current()->is_VM_thread()) {
+    return;
+  }
+
+  ShenandoahMessageBuffer msg("Heap lock must be owned by current thread, or be at safepoint");
+  report_vm_error(file, line, msg.buffer());
+}
@@ -69,6 +69,10 @@ public:

   static void assert_locked_or_shenandoah_safepoint(Mutex* lock, const char* file, int line);

+  static void assert_heaplocked(const char* file, int line);
+  static void assert_not_heaplocked(const char* file, int line);
+  static void assert_heaplocked_or_safepoint(const char* file, int line);
+
 #ifdef ASSERT
 #define shenandoah_assert_in_heap(interior_loc, obj) \
                     ShenandoahAsserts::assert_in_heap(interior_loc, obj, __FILE__, __LINE__);
@@ -134,6 +138,15 @@ public:

 #define shenandoah_assert_locked_or_safepoint(lock) \
                     ShenandoahAsserts::assert_locked_or_shenandoah_safepoint(lock, __FILE__, __LINE__);
+
+#define shenandoah_assert_heaplocked() \
+                    ShenandoahAsserts::assert_heaplocked(__FILE__, __LINE__)
+
+#define shenandoah_assert_not_heaplocked() \
+                    ShenandoahAsserts::assert_not_heaplocked(__FILE__, __LINE__)
+
+#define shenandoah_assert_heaplocked_or_safepoint() \
+                    ShenandoahAsserts::assert_heaplocked_or_safepoint(__FILE__, __LINE__)
 #else
 #define shenandoah_assert_in_heap(interior_loc, obj)
 #define shenandoah_assert_in_correct_region(interior_loc, obj)
@@ -172,6 +185,10 @@ public:
 #define shenandoah_assert_safepoint()
 #define shenandoah_assert_locked_or_safepoint(lock)

+#define shenandoah_assert_heaplocked()
+#define shenandoah_assert_not_heaplocked()
+#define shenandoah_assert_heaplocked_or_safepoint()
+
 #endif

 #define shenandoah_not_implemented \
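As a side note on the mechanism: the macros above capture the caller's __FILE__ and __LINE__, forward them to an out-of-line checker, and expand to nothing when ASSERT is undefined. A minimal self-contained sketch of that pattern follows; ToyHeapLock, ToyAsserts, toy_assert_heaplocked and increase_used_example are illustrative stand-ins, not HotSpot code.

#include <cstdio>
#include <cstdlib>
#include <thread>

// Toy stand-in for the heap lock: just enough state to answer owned_by_self().
class ToyHeapLock {
  std::thread::id _owner;
  bool _locked = false;
public:
  void lock()   { _locked = true;  _owner = std::this_thread::get_id(); }
  void unlock() { _locked = false; _owner = std::thread::id(); }
  bool owned_by_self() const { return _locked && _owner == std::this_thread::get_id(); }
};

ToyHeapLock g_heap_lock;  // plays the role of ShenandoahHeap::heap()->lock() in this sketch

// Out-of-line checker, analogous in shape to ShenandoahAsserts::assert_heaplocked().
struct ToyAsserts {
  static void assert_heaplocked(const char* file, int line) {
    if (g_heap_lock.owned_by_self()) {
      return;
    }
    std::fprintf(stderr, "%s:%d: Heap lock must be owned by current thread\n", file, line);
    std::abort();
  }
};

// The macro records the call site's file/line and disappears entirely in release builds.
#ifdef ASSERT
#define toy_assert_heaplocked() ToyAsserts::assert_heaplocked(__FILE__, __LINE__)
#else
#define toy_assert_heaplocked()
#endif

void increase_used_example() {
  toy_assert_heaplocked();  // reports this caller's location if the lock is not held
  // ... mutate lock-protected state here ...
}

int main() {
  g_heap_lock.lock();
  increase_used_example();  // passes: the lock is held by this thread
  g_heap_lock.unlock();
  return 0;
}

Compiled with -DASSERT the check is active; without it the call sites compile away, which is why such macros can be placed freely in hot paths.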
@@ -42,7 +42,7 @@ ShenandoahFreeSet::ShenandoahFreeSet(ShenandoahHeap* heap, size_t max_regions) :
 }

 void ShenandoahFreeSet::increase_used(size_t num_bytes) {
-  assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   _used += num_bytes;

   assert(_used <= _capacity, "must not use more than we have: used: " SIZE_FORMAT
@@ -262,7 +262,7 @@ void ShenandoahFreeSet::adjust_bounds() {
 }

 HeapWord* ShenandoahFreeSet::allocate_contiguous(ShenandoahAllocRequest& req) {
-  assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();

   size_t words_size = req.size();
   size_t num = ShenandoahHeapRegion::required_regions(words_size * HeapWordSize);
@@ -375,7 +375,7 @@ void ShenandoahFreeSet::try_recycle_trashed(ShenandoahHeapRegion *r) {

 void ShenandoahFreeSet::recycle_trash() {
   // lock is not reentrable, check we don't have it
-  assert_heaplock_not_owned_by_current_thread();
+  shenandoah_assert_not_heaplocked();

   for (size_t i = 0; i < _heap->num_regions(); i++) {
     ShenandoahHeapRegion* r = _heap->get_region(i);
@@ -407,7 +407,7 @@ void ShenandoahFreeSet::flip_to_gc(ShenandoahHeapRegion* r) {
 }

 void ShenandoahFreeSet::clear() {
-  assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   clear_internal();
 }

@@ -423,7 +423,7 @@ void ShenandoahFreeSet::clear_internal() {
 }

 void ShenandoahFreeSet::rebuild() {
-  assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   clear();

   for (size_t idx = 0; idx < _heap->num_regions(); idx++) {
@@ -464,7 +464,7 @@ void ShenandoahFreeSet::rebuild() {
 }

 void ShenandoahFreeSet::log_status() {
-  assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();

   LogTarget(Info, gc, ergo) lt;
   if (lt.is_enabled()) {
@@ -557,7 +557,7 @@ void ShenandoahFreeSet::log_status() {
 }

 HeapWord* ShenandoahFreeSet::allocate(ShenandoahAllocRequest& req, bool& in_new_region) {
-  assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   assert_bounds();

   if (req.size() > ShenandoahHeapRegion::humongous_threshold_words()) {
@@ -703,14 +703,6 @@ double ShenandoahFreeSet::external_fragmentation() {
 }

 #ifdef ASSERT
-void ShenandoahFreeSet::assert_heaplock_owned_by_current_thread() const {
-  _heap->assert_heaplock_owned_by_current_thread();
-}
-
-void ShenandoahFreeSet::assert_heaplock_not_owned_by_current_thread() const {
-  _heap->assert_heaplock_not_owned_by_current_thread();
-}
-
 void ShenandoahFreeSet::assert_bounds() const {
   // Performance invariants. Failing these would not break the free set, but performance
   // would suffer.
@@ -44,8 +44,6 @@ private:
   size_t _used;

   void assert_bounds() const NOT_DEBUG_RETURN;
-  void assert_heaplock_owned_by_current_thread() const NOT_DEBUG_RETURN;
-  void assert_heaplock_not_owned_by_current_thread() const NOT_DEBUG_RETURN;

   bool is_mutator_free(size_t idx) const;
   bool is_collector_free(size_t idx) const;
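For context on the declarations removed above: the NOT_DEBUG_RETURN idiom lets a per-class assert helper be declared once and collapse to an empty inline body in product builds, with the real body supplied only under ASSERT. A rough self-contained imitation of that idiom is sketched below; TOY_NOT_DEBUG_RETURN and ToyFreeSet are made-up stand-ins, not HotSpot's definitions.

#include <cassert>
#include <cstddef>

// Imitation of the idiom: under ASSERT the declaration is completed out of line;
// otherwise it gets an empty body right in the class.
#ifdef ASSERT
#define TOY_NOT_DEBUG_RETURN
#else
#define TOY_NOT_DEBUG_RETURN {}
#endif

class ToyFreeSet {
  size_t _used = 0;
  size_t _capacity = 1024;
public:
  void assert_bounds() const TOY_NOT_DEBUG_RETURN;

  void increase_used(size_t bytes) {
    _used += bytes;
    assert_bounds();  // real check in debug builds, no-op otherwise
  }
};

#ifdef ASSERT
void ToyFreeSet::assert_bounds() const {
  assert(_used <= _capacity && "must not use more than we have");
}
#endif

The commit keeps this style for assert_bounds() but drops the per-class heap-lock variants, since the shenandoah_assert_heaplocked() family now covers them from one place.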
@@ -610,12 +610,12 @@ size_t ShenandoahHeap::committed() const {
 }

 void ShenandoahHeap::increase_committed(size_t bytes) {
-  assert_heaplock_or_safepoint();
+  shenandoah_assert_heaplocked_or_safepoint();
   _committed += bytes;
 }

 void ShenandoahHeap::decrease_committed(size_t bytes) {
-  assert_heaplock_or_safepoint();
+  shenandoah_assert_heaplocked_or_safepoint();
   _committed -= bytes;
 }

@@ -2521,20 +2521,6 @@ void ShenandoahHeap::op_final_updaterefs() {
   }
 }

-#ifdef ASSERT
-void ShenandoahHeap::assert_heaplock_owned_by_current_thread() {
-  _lock.assert_owned_by_current_thread();
-}
-
-void ShenandoahHeap::assert_heaplock_not_owned_by_current_thread() {
-  _lock.assert_not_owned_by_current_thread();
-}
-
-void ShenandoahHeap::assert_heaplock_or_safepoint() {
-  _lock.assert_owned_by_current_thread_or_safepoint();
-}
-#endif
-
 void ShenandoahHeap::print_extended_on(outputStream *st) const {
   print_on(st);
   print_heap_regions_on(st);
@@ -2556,7 +2542,7 @@ bool ShenandoahHeap::is_bitmap_slice_committed(ShenandoahHeapRegion* r, bool ski
 }

 bool ShenandoahHeap::commit_bitmap_slice(ShenandoahHeapRegion* r) {
-  assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();

   // Bitmaps in special regions do not need commits
   if (_bitmap_region_special) {
@@ -2580,7 +2566,7 @@ bool ShenandoahHeap::commit_bitmap_slice(ShenandoahHeapRegion* r) {
 }

 bool ShenandoahHeap::uncommit_bitmap_slice(ShenandoahHeapRegion *r) {
-  assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();

   // Bitmaps in special regions do not need uncommits
   if (_bitmap_region_special) {
@@ -129,10 +129,6 @@ public:
     return &_lock;
   }

-  void assert_heaplock_owned_by_current_thread() NOT_DEBUG_RETURN;
-  void assert_heaplock_not_owned_by_current_thread() NOT_DEBUG_RETURN;
-  void assert_heaplock_or_safepoint() NOT_DEBUG_RETURN;
-
 // ---------- Initialization, termination, identification, printing routines
 //
 public:
@@ -87,7 +87,7 @@ void ShenandoahHeapRegion::report_illegal_transition(const char *method) {
 }

 void ShenandoahHeapRegion::make_regular_allocation() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();

   switch (_state) {
     case _empty_uncommitted:
@@ -103,7 +103,7 @@ void ShenandoahHeapRegion::make_regular_allocation() {
 }

 void ShenandoahHeapRegion::make_regular_bypass() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   assert (_heap->is_full_gc_in_progress() || _heap->is_degenerated_gc_in_progress(),
           "only for full or degen GC");

@@ -128,7 +128,7 @@ void ShenandoahHeapRegion::make_regular_bypass() {
 }

 void ShenandoahHeapRegion::make_humongous_start() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   switch (_state) {
     case _empty_uncommitted:
       do_commit();
@@ -141,7 +141,7 @@ void ShenandoahHeapRegion::make_humongous_start() {
 }

 void ShenandoahHeapRegion::make_humongous_start_bypass() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   assert (_heap->is_full_gc_in_progress(), "only for full GC");

   switch (_state) {
@@ -157,7 +157,7 @@ void ShenandoahHeapRegion::make_humongous_start_bypass() {
 }

 void ShenandoahHeapRegion::make_humongous_cont() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   switch (_state) {
     case _empty_uncommitted:
       do_commit();
@@ -170,7 +170,7 @@ void ShenandoahHeapRegion::make_humongous_cont() {
 }

 void ShenandoahHeapRegion::make_humongous_cont_bypass() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   assert (_heap->is_full_gc_in_progress(), "only for full GC");

   switch (_state) {
@@ -186,7 +186,7 @@ void ShenandoahHeapRegion::make_humongous_cont_bypass() {
 }

 void ShenandoahHeapRegion::make_pinned() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   assert(pin_count() > 0, "Should have pins: " SIZE_FORMAT, pin_count());

   switch (_state) {
@@ -208,7 +208,7 @@ void ShenandoahHeapRegion::make_pinned() {
 }

 void ShenandoahHeapRegion::make_unpinned() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   assert(pin_count() == 0, "Should not have pins: " SIZE_FORMAT, pin_count());

   switch (_state) {
@@ -230,7 +230,7 @@ void ShenandoahHeapRegion::make_unpinned() {
 }

 void ShenandoahHeapRegion::make_cset() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   switch (_state) {
     case _regular:
       set_state(_cset);
@@ -242,7 +242,7 @@ void ShenandoahHeapRegion::make_cset() {
 }

 void ShenandoahHeapRegion::make_trash() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   switch (_state) {
     case _cset:
       // Reclaiming cset regions
@@ -267,7 +267,7 @@ void ShenandoahHeapRegion::make_trash_immediate() {
 }

 void ShenandoahHeapRegion::make_empty() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   switch (_state) {
     case _trash:
       set_state(_empty_committed);
@@ -279,7 +279,7 @@ void ShenandoahHeapRegion::make_empty() {
 }

 void ShenandoahHeapRegion::make_uncommitted() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   switch (_state) {
     case _empty_committed:
       do_uncommit();
@@ -291,7 +291,7 @@ void ShenandoahHeapRegion::make_uncommitted() {
 }

 void ShenandoahHeapRegion::make_committed_bypass() {
-  _heap->assert_heaplock_owned_by_current_thread();
+  shenandoah_assert_heaplocked();
   assert (_heap->is_full_gc_in_progress(), "only for full GC");

   switch (_state) {
@@ -330,7 +330,7 @@ void ShenandoahHeapRegion::reset_alloc_metadata_to_shared() {

 void ShenandoahHeapRegion::update_seqnum_last_alloc_mutator() {
   assert(_heap->is_traversal_mode(), "Sanity");
-  _heap->assert_heaplock_or_safepoint();
+  shenandoah_assert_heaplocked_or_safepoint();
   _seqnum_last_alloc_mutator = _alloc_seq_num.value++;
 }

@@ -420,7 +420,7 @@ public:
   }

   void set_update_watermark(HeapWord* w) {
-    _heap->assert_heaplock_or_safepoint();
+    shenandoah_assert_heaplocked_or_safepoint();
     assert(bottom() <= w && w <= top(), "within bounds");
     _update_watermark = w;
   }
@@ -31,8 +31,7 @@
 #include "runtime/atomic.hpp"

 HeapWord* ShenandoahHeapRegion::allocate(size_t size, ShenandoahAllocRequest::Type type) {
-  _heap->assert_heaplock_or_safepoint();
-
+  shenandoah_assert_heaplocked_or_safepoint();
   assert(is_object_aligned(size), "alloc size breaks alignment: " SIZE_FORMAT, size);

   HeapWord* obj = top();
@@ -62,23 +62,14 @@ public:
     Thread::SpinRelease(&_state);
   }

+  bool owned_by_self() {
 #ifdef ASSERT
-  void assert_owned_by_current_thread() {
-    assert(_state == locked, "must be locked");
-    assert(_owner == Thread::current(), "must be owned by current thread");
-  }
-
-  void assert_not_owned_by_current_thread() {
-    assert(_owner != Thread::current(), "must be not owned by current thread");
-  }
-
-  void assert_owned_by_current_thread_or_safepoint() {
-    Thread* thr = Thread::current();
-    assert((_state == locked && _owner == thr) ||
-           (SafepointSynchronize::is_at_safepoint() && thr->is_VM_thread()),
-            "must own heap lock or by VM thread at safepoint");
-  }
+    return _state == locked && _owner == Thread::current();
+#else
+    ShouldNotReachHere();
+    return false;
 #endif
+  }
 };

 class ShenandoahLocker : public StackObj {
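The ShenandoahLocker class visible in the trailing context is the usual way call sites take this lock. A short sketch of how a scoped locker and an owned_by_self()-backed assert fit together follows; ToyLock, ToyLocker, heap_lock and locked_operation are simplified stand-ins, not the real HotSpot classes.

#include <cassert>
#include <thread>

// Simplified spin-lock stand-in exposing only what the asserts need: owned_by_self().
class ToyLock {
  std::thread::id _owner;
  bool _locked = false;
public:
  void lock()   { _locked = true;  _owner = std::this_thread::get_id(); }
  void unlock() { _locked = false; _owner = std::thread::id(); }
  bool owned_by_self() const { return _locked && _owner == std::this_thread::get_id(); }
};

// RAII guard in the spirit of ShenandoahLocker: acquire in the constructor,
// release in the destructor, so the lock is held for the enclosing scope.
class ToyLocker {
  ToyLock* _lock;
public:
  explicit ToyLocker(ToyLock* lock) : _lock(lock) { if (_lock != nullptr) _lock->lock(); }
  ~ToyLocker() { if (_lock != nullptr) _lock->unlock(); }
};

ToyLock heap_lock;  // stand-in for the heap lock

void locked_operation() {
  // The shape the commit moves to: the lock only reports ownership, while the
  // failure policy lives outside the lock class (in ShenandoahAsserts there).
  assert(heap_lock.owned_by_self() && "Heap lock must be owned by current thread");
  // ... mutate lock-protected state ...
}

int main() {
  ToyLocker guard(&heap_lock);  // scoped acquisition
  locked_operation();           // assert holds while the guard is alive
  return 0;
}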