8337269: G1ConfidencePercent interpreted inconsistently

Reviewed-by: kbarrett, iwalulya
This commit is contained in:
Thomas Schatzl 2024-10-15 08:46:23 +00:00
parent 521effe017
commit 3b8a2f8c45
3 changed files with 11 additions and 11 deletions

View File

@@ -53,7 +53,7 @@
#include "gc/shared/gcTraceTime.inline.hpp"
G1Policy::G1Policy(STWGCTimer* gc_timer) :
_predictor(G1ConfidencePercent / 100.0),
_predictor((100 - G1ConfidencePercent) / 100.0),
_analytics(new G1Analytics(&_predictor)),
_remset_tracker(),
_mmu_tracker(new G1MMUTracker(GCPauseIntervalMillis / 1000.0, MaxGCPauseMillis / 1000.0)),

View File

@@ -29,8 +29,9 @@
// Utility class containing various helper methods for prediction.
class G1Predictions {
private:
double _sigma;
private:
// Scale factor indicating to which degree stddev should be taking into account in predictions.
double _stddev_scale;
// This function is used to estimate the stddev of sample sets. There is some
// special consideration of small sample sets: the actual stddev for them is
@@ -46,16 +47,14 @@ class G1Predictions {
}
return estimate;
}
public:
G1Predictions(double sigma) : _sigma(sigma) {
assert(sigma >= 0.0, "Confidence must be larger than or equal to zero");
public:
G1Predictions(double stddev_scale) : _stddev_scale(stddev_scale) {
assert(stddev_scale >= 0.0, "must be");
}
// Confidence factor.
double sigma() const { return _sigma; }
double predict(TruncatedSeq const* seq) const {
return seq->davg() + _sigma * stddev_estimate(seq);
return seq->davg() + _stddev_scale * stddev_estimate(seq);
}
double predict_in_unit_interval(TruncatedSeq const* seq) const {

View File

@@ -111,7 +111,8 @@
range(1, max_intx) \
\
product(uint, G1ConfidencePercent, 50, \
"Confidence level for MMU/pause predictions") \
"Confidence level for MMU/pause predictions. A higher value " \
"means that G1 will use less safety margin for its predictions.") \
range(1, 100) \
\
product(uintx, G1SummarizeRSetStatsPeriod, 0, DIAGNOSTIC, \