8367530: The exhaustiveness errors could be improved

Reviewed-by: vromero, mcimadamore
This commit is contained in:
Jan Lahoda 2026-02-04 11:03:56 +00:00
parent d7523ec8d2
commit 84e8787d1f
16 changed files with 1449 additions and 99 deletions

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -41,11 +41,17 @@ import com.sun.tools.javac.code.Kinds.Kind;
import com.sun.tools.javac.code.Type.TypeVar;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.SequencedSet;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.stream.Collectors.groupingBy;
import static com.sun.tools.javac.code.Flags.RECORD;
/** A class to compute exhaustiveness of set of switch cases.
*
@ -55,6 +61,14 @@ import static java.util.stream.Collectors.groupingBy;
* deletion without notice.</b>
*/
public class ExhaustivenessComputer {
private static final long DEFAULT_MAX_BASE_CHECKS = 4_000_000;
//when baseChecks is set to a value different than this, the checks
//will be counted, and if too many happen, the process will be stopped;
//when baseChecks is set to this value, there's no counting, and the
//process will continue as long as needed
private static final long NO_BASE_CHECKS_COUNTING = -1;
protected static final Context.Key<ExhaustivenessComputer> exhaustivenessKey = new Context.Key<>();
private final Symtab syms;
@ -62,6 +76,8 @@ public class ExhaustivenessComputer {
private final Check chk;
private final Infer infer;
private final Map<Pair<Type, Type>, Boolean> isSubtypeCache = new HashMap<>();
private final long maxBaseChecks;
private long baseChecks = NO_BASE_CHECKS_COUNTING;
public static ExhaustivenessComputer instance(Context context) {
ExhaustivenessComputer instance = context.get(exhaustivenessKey);
@ -77,9 +93,22 @@ public class ExhaustivenessComputer {
types = Types.instance(context);
chk = Check.instance(context);
infer = Infer.instance(context);
Options options = Options.instance(context);
String baseChecks = options.get("exhaustivityMaxBaseChecks");
long computedMaxBaseChecks = DEFAULT_MAX_BASE_CHECKS;
if (baseChecks != null) {
try {
computedMaxBaseChecks = Long.parseLong(baseChecks);
} catch (NumberFormatException _) {
//ignore invalid values and use the default maximum number of checks
}
}
maxBaseChecks = computedMaxBaseChecks;
}
public boolean exhausts(JCExpression selector, List<JCCase> cases) {
public ExhaustivenessResult exhausts(JCExpression selector, List<JCCase> cases) {
Set<PatternDescription> patternSet = new HashSet<>();
Map<Symbol, Set<Symbol>> enum2Constants = new HashMap<>();
Set<Object> booleanLiterals = new HashSet<>(Set.of(0, 1));
@ -113,7 +142,7 @@ public class ExhaustivenessComputer {
}
if (types.unboxedTypeOrType(selector.type).hasTag(TypeTag.BOOLEAN) && booleanLiterals.isEmpty()) {
return true;
return ExhaustivenessResult.ofExhaustive();
}
for (Entry<Symbol, Set<Symbol>> e : enum2Constants.entrySet()) {
@ -121,47 +150,77 @@ public class ExhaustivenessComputer {
patternSet.add(new BindingPattern(e.getKey().type));
}
}
Set<PatternDescription> patterns = patternSet;
Set<Set<PatternDescription>> seenFallback = new HashSet<>();
boolean useHashes = true;
try {
boolean repeat = true;
while (repeat) {
Set<PatternDescription> updatedPatterns;
updatedPatterns = reduceBindingPatterns(selector.type, patterns);
updatedPatterns = reduceNestedPatterns(updatedPatterns, useHashes);
updatedPatterns = reduceRecordPatterns(updatedPatterns);
updatedPatterns = removeCoveredRecordPatterns(updatedPatterns);
repeat = !updatedPatterns.equals(patterns);
if (checkCovered(selector.type, patterns)) {
return true;
}
if (!repeat) {
//there may be situation like:
//class B permits S1, S2
//patterns: R(S1, B), R(S2, S2)
//this might be joined to R(B, S2), as B could be rewritten to S2
//but hashing in reduceNestedPatterns will not allow that
//disable the use of hashing, and use subtyping in
//reduceNestedPatterns to handle situations like this:
repeat = useHashes && seenFallback.add(updatedPatterns);
useHashes = false;
} else {
//if a reduction happened, make sure hashing in reduceNestedPatterns
//is enabled, as the hashing speeds up the process significantly:
useHashes = true;
}
patterns = updatedPatterns;
CoverageResult coveredResult = computeCoverage(selector.type, patternSet, PatternEquivalence.STRICT);
if (coveredResult.covered()) {
return ExhaustivenessResult.ofExhaustive();
}
return checkCovered(selector.type, patterns);
Set<PatternDescription> details =
this.computeMissingPatternDescriptions(selector.type, coveredResult.incompletePatterns())
.stream()
.flatMap(pd -> {
if (pd instanceof BindingPattern bp && enum2Constants.containsKey(bp.type.tsym)) {
Symbol enumType = bp.type.tsym;
return enum2Constants.get(enumType).stream().map(c -> new EnumConstantPattern(bp.type, c.name));
} else {
return Stream.of(pd);
}
})
.collect(Collectors.toSet());
return ExhaustivenessResult.ofDetails(details);
} catch (CompletionFailure cf) {
chk.completionError(selector.pos(), cf);
return true; //error recovery
} finally {
isSubtypeCache.clear();
return ExhaustivenessResult.ofExhaustive(); //error recovery
}
}
/* Given the set of patterns, runs the reductions of it as long as possible.
 * If the (reduced) set of patterns covers the given selector type, returns
 * covered == true, and incompletePatterns == null.
 * If the (reduced) set of patterns does not cover the given selector type,
 * returns covered == false, and incompletePatterns == the reduced set of patterns.
 *
 * The reduction is a fixed-point iteration: each round applies the four
 * reduction steps and repeats until the pattern set no longer changes.
 * The patternEquivalence parameter controls how strictly nested patterns are
 * matched in reduceNestedPatterns (see PatternEquivalence).
 */
private CoverageResult computeCoverage(Type selectorType, Set<PatternDescription> patterns, PatternEquivalence patternEquivalence) {
    Set<PatternDescription> updatedPatterns;
    //sets already seen in the hash-less fallback mode, to guarantee termination:
    Set<Set<PatternDescription>> seenPatterns = new HashSet<>();
    boolean useHashes = true;
    boolean repeat = true;
    do {
        updatedPatterns = reduceBindingPatterns(selectorType, patterns);
        updatedPatterns = reduceNestedPatterns(updatedPatterns, useHashes, patternEquivalence);
        updatedPatterns = reduceRecordPatterns(updatedPatterns);
        updatedPatterns = removeCoveredRecordPatterns(updatedPatterns);
        repeat = !updatedPatterns.equals(patterns);
        //note: checks the set from before this round; the final (post-loop)
        //check below covers the last round's result:
        if (checkCovered(selectorType, patterns)) {
            return new CoverageResult(true, null);
        }
        if (!repeat) {
            //there may be situation like:
            //class B permits S1, S2
            //patterns: R(S1, B), R(S2, S2)
            //this might be joined to R(B, S2), as B could be rewritten to S2
            //but hashing in reduceNestedPatterns will not allow that
            //disable the use of hashing, and use subtyping in
            //reduceNestedPatterns to handle situations like this:
            repeat = useHashes && seenPatterns.add(updatedPatterns);
            useHashes = false;
        } else {
            //if a reduction happened, make sure hashing in reduceNestedPatterns
            //is enabled, as the hashing speeds up the process significantly:
            useHashes = true;
        }
        patterns = updatedPatterns;
    } while (repeat);
    if (checkCovered(selectorType, patterns)) {
        return new CoverageResult(true, null);
    }
    return new CoverageResult(false, patterns);
}
private record CoverageResult(boolean covered, Set<PatternDescription> incompletePatterns) {}
private boolean checkCovered(Type seltype, Iterable<PatternDescription> patterns) {
for (Type seltypeComponent : components(seltype)) {
for (PatternDescription pd : patterns) {
@ -215,6 +274,7 @@ public class ExhaustivenessComputer {
if (clazz.isSealed() && clazz.isAbstract() &&
//if a binding pattern for clazz already exists, no need to analyze it again:
!existingBindings.contains(clazz)) {
ListBuffer<PatternDescription> bindings = new ListBuffer<>();
//do not reduce to types unrelated to the selector type:
Type clazzType = clazz.type;
if (components(selectorType).stream()
@ -222,16 +282,7 @@ public class ExhaustivenessComputer {
continue;
}
Set<Symbol> permitted = allPermittedSubTypes(clazz, csym -> {
Type instantiated;
if (csym.type.allparams().isEmpty()) {
instantiated = csym.type;
} else {
instantiated = infer.instantiatePatternType(selectorType, csym);
}
return instantiated != null && types.isCastable(selectorType, instantiated);
});
Set<Symbol> permitted = allPermittedSubTypes(clazz, isApplicableSubtypePredicate(selectorType));
//the set of pending permitted subtypes needed to cover clazz:
Set<Symbol> pendingPermitted = new HashSet<>(permitted);
@ -263,7 +314,7 @@ public class ExhaustivenessComputer {
}
if (pendingPermitted.isEmpty()) {
toAdd.add(new BindingPattern(clazz.type));
toAdd.add(new BindingPattern(clazz.type, Set.of()));
}
}
}
@ -304,6 +355,49 @@ public class ExhaustivenessComputer {
return permitted;
}
/** Returns a predicate that accepts a (permitted sub)type symbol iff its
 *  pattern type can be instantiated for {@code targetType} and the selector
 *  value could actually flow into it (i.e. the cast is permitted).
 */
private <C extends TypeSymbol> Predicate<C> isApplicableSubtypePredicate(Type targetType) {
    return candidate -> {
        Type patternType = instantiatePatternType(targetType, candidate);
        if (patternType == null) {
            return false; //could not infer a valid instantiation
        }
        return types.isCastable(targetType, patternType);
    };
}
/** Instantiates {@code csym}'s type as a pattern type against {@code targetType}.
 *  Non-generic symbols are returned as-is; for generic symbols, inference is
 *  used (which may return null when no valid instantiation exists).
 */
private Type instantiatePatternType(Type targetType, TypeSymbol csym) {
    boolean isGeneric = !csym.type.allparams().isEmpty();
    return isGeneric ? infer.instantiatePatternType(targetType, csym)
                     : csym.type;
}
/** Computes the "leaf" permitted subtypes of {@code root}: sealed abstract
 *  classes/interfaces are expanded into their permitted subclasses
 *  (transitively, filtered by {@code accept}), everything else is kept as a leaf.
 */
private Set<ClassSymbol> leafPermittedSubTypes(TypeSymbol root, Predicate<ClassSymbol> accept) {
    Set<ClassSymbol> permitted = new HashSet<>();
    //worklist of types still to be expanded:
    List<ClassSymbol> permittedSubtypesClosure = baseClasses(root);
    while (permittedSubtypesClosure.nonEmpty()) {
        ClassSymbol current = permittedSubtypesClosure.head;
        permittedSubtypesClosure = permittedSubtypesClosure.tail;
        //ensure the symbol is loaded before inspecting flags/permitted subclasses:
        current.complete();
        if (current.isSealed() && current.isAbstract()) {
            for (Type t : current.getPermittedSubclasses()) {
                ClassSymbol csym = (ClassSymbol) t.tsym;
                if (accept.test(csym)) {
                    permittedSubtypesClosure = permittedSubtypesClosure.prepend(csym);
                }
            }
        } else {
            //either non-sealed or concrete: treat as a leaf
            permitted.add(current);
        }
    }
    return permitted;
}
private List<ClassSymbol> baseClasses(TypeSymbol root) {
if (root instanceof ClassSymbol clazz) {
return List.of(clazz);
@ -336,7 +430,8 @@ public class ExhaustivenessComputer {
* as pattern hashes cannot be used to speed up the matching process
*/
private Set<PatternDescription> reduceNestedPatterns(Set<PatternDescription> patterns,
boolean useHashes) {
boolean useHashes,
PatternEquivalence patternEquivalence) {
/* implementation note:
* finding a sub-set of patterns that only differ in a single
* column is time-consuming task, so this method speeds it up by:
@ -386,13 +481,13 @@ public class ExhaustivenessComputer {
RecordPattern rpOther = candidatesArr[nextCandidate];
if (rpOne.recordType.tsym == rpOther.recordType.tsym &&
nestedComponentsEquivalent(rpOne, rpOther, mismatchingCandidate, useHashes)) {
nestedComponentsEquivalent(rpOne, rpOther, mismatchingCandidate, useHashes, patternEquivalence)) {
join.append(rpOther);
}
}
var nestedPatterns = join.stream().map(rp -> rp.nested[mismatchingCandidateFin]).collect(Collectors.toSet());
var updatedPatterns = reduceNestedPatterns(nestedPatterns, useHashes);
var updatedPatterns = reduceNestedPatterns(nestedPatterns, useHashes, patternEquivalence);
updatedPatterns = reduceRecordPatterns(updatedPatterns);
updatedPatterns = removeCoveredRecordPatterns(updatedPatterns);
@ -403,16 +498,11 @@ public class ExhaustivenessComputer {
current.removeAll(join);
}
for (PatternDescription nested : updatedPatterns) {
PatternDescription[] newNested =
Arrays.copyOf(rpOne.nested, rpOne.nested.length);
newNested[mismatchingCandidateFin] = nested;
RecordPattern nue = new RecordPattern(rpOne.recordType(),
rpOne.fullComponentTypes(),
newNested,
new HashSet<>(join));
current.add(nue);
}
generatePatternsWithReplacedNestedPattern(rpOne,
mismatchingCandidateFin,
updatedPatterns,
Set.copyOf(join),
current::add);
}
}
}
@ -434,11 +524,32 @@ public class ExhaustivenessComputer {
* - it's type is a supertype of the existing pattern's type
* - it was produced by a reduction from a record pattern that is equivalent to
* the existing pattern
* - only if PatternEquivalence is LOOSE and the type is the same of the type
* of an existing record pattern (the binding pattern may stand in place of
* a record pattern). This is only used to compute the missing patterns that
* would make the original pattern set exhaustive.
*
* For example, having (with mismatchingCandidate == 0):
* existing: R(A _, Box(var _)) {}
candidate: R(B _, Box(var _)) {}
* these are always equivalent; as all nested patterns except of
* component 0 are exactly equivalent
*
* existing: R(A _, SubtypeOfBox _) {}
candidate: R(A _, Box _) {}
* this is only equivalent when useHashes == false; Box _ could be replaced
* with a more specific SubtypeOfBox _
*
* existing: R(A _, Box(var _)) {}
candidate: R(A _, Box _) {}
* this is only equivalent when useHashes == false and patternEquivalence == LOOSE;
* Box _ is accepted in place of the more specific record pattern
*/
private boolean nestedComponentsEquivalent(RecordPattern existing,
RecordPattern candidate,
int mismatchingCandidate,
boolean useHashes) {
boolean useHashes,
PatternEquivalence patternEquivalence) {
NEXT_NESTED:
for (int i = 0; i < existing.nested.length; i++) {
if (i != mismatchingCandidate) {
@ -457,22 +568,28 @@ public class ExhaustivenessComputer {
return false;
}
} else if (existing.nested[i] instanceof RecordPattern nestedExisting) {
java.util.List<PatternDescription> pendingReplacedPatterns =
new ArrayList<>(nestedCandidate.sourcePatterns());
if (patternEquivalence == PatternEquivalence.LOOSE) {
if (!isSubtypeErasure(nestedExisting.recordType(), nestedCandidate.type)) {
return false;
}
} else {
java.util.List<PatternDescription> pendingReplacedPatterns =
new ArrayList<>(nestedCandidate.sourcePatterns());
while (!pendingReplacedPatterns.isEmpty()) {
PatternDescription currentReplaced = pendingReplacedPatterns.removeLast();
while (!pendingReplacedPatterns.isEmpty()) {
PatternDescription currentReplaced = pendingReplacedPatterns.removeLast();
if (nestedExisting.equals(currentReplaced)) {
//candidate.nested[i] is substitutable for existing.nested[i]
//continue with the next nested pattern:
continue NEXT_NESTED;
if (nestedExisting.equals(currentReplaced)) {
//candidate.nested[i] is substitutable for existing.nested[i]
//continue with the next nested pattern:
continue NEXT_NESTED;
}
pendingReplacedPatterns.addAll(currentReplaced.sourcePatterns());
}
pendingReplacedPatterns.addAll(currentReplaced.sourcePatterns());
return false;
}
return false;
} else {
return false;
}
@ -563,6 +680,8 @@ public class ExhaustivenessComputer {
}
private boolean isBpCovered(Type componentType, PatternDescription newNested) {
reportCheck();
if (newNested instanceof BindingPattern bp) {
Type seltype = types.erasure(componentType);
Type pattype = types.erasure(bp.type);
@ -574,9 +693,18 @@ public class ExhaustivenessComputer {
return false;
}
sealed interface PatternDescription {
/** Records one base check. When counting is enabled (missing-pattern
 *  computation is running) and the budget {@code maxBaseChecks} is exceeded,
 *  aborts the computation by throwing {@link TooManyChecksException}.
 */
protected void reportCheck() {
    if (baseChecks == NO_BASE_CHECKS_COUNTING) {
        return; //counting disabled, nothing to do
    }
    baseChecks++;
    if (baseChecks > maxBaseChecks) {
        throw new TooManyChecksException(null);
    }
}
/** A description of a pattern used by the exhaustiveness computation.
 *  Implementations in this file: BindingPattern, RecordPattern and
 *  EnumConstantPattern.
 */
public sealed interface PatternDescription {
    /** The type matched by this pattern. */
    public Type type();
    /** The patterns this pattern was reduced/derived from (empty if none). */
    public Set<PatternDescription> sourcePatterns();
}
public PatternDescription makePatternDescription(Type selectorType, JCPattern pattern) {
if (pattern instanceof JCBindingPattern binding) {
Type type = !selectorType.isPrimitive() && types.isSubtype(selectorType, binding.type)
@ -586,9 +714,7 @@ public class ExhaustivenessComputer {
Type[] componentTypes;
if (!record.type.isErroneous()) {
componentTypes = ((ClassSymbol) record.type.tsym).getRecordComponents()
.map(r -> types.memberType(record.type, r))
.toArray(s -> new Type[s]);
componentTypes = instantiatedComponentTypes(record.type);
}
else {
componentTypes = record.nested.map(t -> types.createErrorType(t.type)).toArray(s -> new Type[s]);;
@ -611,7 +737,7 @@ public class ExhaustivenessComputer {
throw Assert.error();
}
}
record BindingPattern(Type type, Set<PatternDescription> sourcePatterns) implements PatternDescription {
public record BindingPattern(Type type, Set<PatternDescription> sourcePatterns) implements PatternDescription {
public BindingPattern(Type type) {
this(type, Set.of());
@ -631,7 +757,7 @@ public class ExhaustivenessComputer {
return type.tsym + " _";
}
}
record RecordPattern(Type recordType, int _hashCode, Type[] fullComponentTypes, PatternDescription[] nested, Set<PatternDescription> sourcePatterns) implements PatternDescription {
public record RecordPattern(Type recordType, int _hashCode, Type[] fullComponentTypes, PatternDescription[] nested, Set<PatternDescription> sourcePatterns) implements PatternDescription {
public RecordPattern(Type recordType, Type[] fullComponentTypes, PatternDescription[] nested) {
this(recordType, fullComponentTypes, nested, Set.of());
@ -673,5 +799,450 @@ public class ExhaustivenessComputer {
.map(pd -> pd.toString())
.collect(Collectors.joining(", ")) + ")";
}
@Override
public Type type() {
return recordType;
}
}
/** A description of a pattern matching a single enum constant; used when
 *  reporting missing cases for an enum selector.
 */
public record EnumConstantPattern(Type enumType, Name enumConstant) implements PatternDescription {
    @Override
    public Type type() {
        return enumType();
    }
    @Override
    public Set<PatternDescription> sourcePatterns() {
        //enum constant patterns are synthesized directly, never reduced from other patterns
        return Set.of();
    }
    @Override //was missing: toString overrides java.lang.Record's generated form
    public String toString() {
        return enumType() + "." + enumConstant();
    }
}
/** The outcome of the exhaustiveness check. When {@code exhaustive} is false,
 *  {@code notExhaustiveDetails} holds example missing patterns (possibly
 *  empty when no details could be computed); it is null when exhaustive.
 */
public record ExhaustivenessResult(boolean exhaustive, Set<PatternDescription> notExhaustiveDetails) {
    /** An exhaustive result; carries no details. */
    public static ExhaustivenessResult ofExhaustive() {
        return new ExhaustivenessResult(true, null);
    }
    /** A non-exhaustive result with the given missing-pattern details. */
    public static ExhaustivenessResult ofDetails(Set<PatternDescription> notExhaustiveDetails) {
        Set<PatternDescription> details = notExhaustiveDetails;
        if (details == null) {
            details = Set.of(); //normalize: never expose null details for a non-exhaustive result
        }
        return new ExhaustivenessResult(false, details);
    }
}
//computation of missing patterns:
/** Computes example patterns that, if added, would make the switch exhaustive.
 *  {@code incompletePatterns} is the reduced-but-incomplete set produced by
 *  computeCoverage. The work is budgeted by {@code maxBaseChecks}; when the
 *  budget is exhausted, the best set computed so far (possibly empty) is returned.
 */
protected Set<PatternDescription> computeMissingPatternDescriptions(Type selectorType,
                                                                    Set<PatternDescription> incompletePatterns) {
    if (maxBaseChecks == 0) {
        //budget of zero: missing-pattern computation is disabled entirely
        return Set.of();
    }
    try {
        //enable base-check counting for the duration of this computation:
        baseChecks = 0;
        //start from the trivially missing "whole selector" pattern and expand it:
        PatternDescription defaultPattern = new BindingPattern(selectorType);
        return expandMissingPatternDescriptions(selectorType,
                                                selectorType,
                                                defaultPattern,
                                                incompletePatterns,
                                                Set.of(defaultPattern));
    } catch (TooManyChecksException ex) {
        //budget exceeded: fall back to whatever was computed before giving up
        return ex.missingPatterns != null ? ex.missingPatterns : Set.of();
    } finally {
        //disable counting again for ordinary exhaustiveness checks:
        baseChecks = NO_BASE_CHECKS_COUNTING;
    }
}
/** Wrapper around doExpandMissingPatternDescriptions that attaches the current
 *  best result ({@code inMissingPatterns}) to a budget-exceeded exception that
 *  does not carry one yet, so callers can still report partial results.
 */
private Set<PatternDescription> expandMissingPatternDescriptions(Type selectorType,
                                                                 Type targetType,
                                                                 PatternDescription toExpand,
                                                                 Set<? extends PatternDescription> basePatterns,
                                                                 Set<PatternDescription> inMissingPatterns) {
    try {
        return doExpandMissingPatternDescriptions(selectorType, targetType,
                                                  toExpand, basePatterns,
                                                  inMissingPatterns);
    } catch (TooManyChecksException ex) {
        if (ex.missingPatterns == null) {
            //preserve the best-so-far result in the propagated exception:
            ex = new TooManyChecksException(inMissingPatterns);
        }
        throw ex;
    }
}
/** Expands the binding pattern {@code toExpand} (occurring somewhere inside
 *  {@code inMissingPatterns}) into more specific missing patterns:
 *  - a binding for a sealed type may be split into its permitted subtypes,
 *  - a binding for a record type may be expanded into record patterns,
 *  recursing into the newly created nested patterns. Non-binding patterns and
 *  non-expandable bindings are returned unchanged.
 */
private Set<PatternDescription> doExpandMissingPatternDescriptions(Type selectorType,
                                                                   Type targetType,
                                                                   PatternDescription toExpand,
                                                                   Set<? extends PatternDescription> basePatterns,
                                                                   Set<PatternDescription> inMissingPatterns) {
    if (toExpand instanceof BindingPattern bp) {
        if (bp.type.tsym.isSealed()) {
            //try to replace binding patterns for sealed types with all their immediate permitted applicable types:
            List<Type> permitted = ((ClassSymbol) bp.type.tsym).getPermittedSubclasses();
            //LinkedHashSet keeps declaration order, for deterministic output:
            Set<PatternDescription> applicableDirectPermittedPatterns =
                    permitted.stream()
                             .map(type -> type.tsym)
                             .filter(isApplicableSubtypePredicate(targetType))
                             .map(csym -> new BindingPattern(types.erasure(csym.type)))
                             .collect(Collectors.toCollection(LinkedHashSet::new));
            //remove the permitted subtypes that are not needed to achieve exhaustiveness
            boolean reduced =
                    removeUnnecessaryPatterns(selectorType, bp, basePatterns, inMissingPatterns, applicableDirectPermittedPatterns);
            if (!reduced && !hasMatchingRecordPattern(basePatterns, inMissingPatterns, toExpand)) {
                //if all immediate permitted subtypes are needed,
                //give up, and simply use the current pattern:
                return inMissingPatterns;
            }
            Set<PatternDescription> currentMissingPatterns =
                    replace(inMissingPatterns, toExpand, applicableDirectPermittedPatterns);
            //try to recursively expand on each viable pattern:
            for (PatternDescription viable : applicableDirectPermittedPatterns) {
                currentMissingPatterns = expandMissingPatternDescriptions(selectorType, targetType,
                                                                          viable, basePatterns,
                                                                          currentMissingPatterns);
            }
            return currentMissingPatterns;
        } else if ((bp.type.tsym.flags_field & Flags.RECORD) != 0 &&
                   //only expand record types into record patterns if there's a chance it may change the outcome,
                   //i.e. there is a record pattern at the spot in the original base patterns:
                   hasMatchingRecordPattern(basePatterns, inMissingPatterns, toExpand)) {
            //if there is a binding pattern at a place where the original base patterns
            //have a record pattern, try to expand the binding pattern into a record pattern.
            //create all possible combinations of record pattern components:
            Type[] componentTypes = instantiatedComponentTypes(bp.type);
            //cartesian product of the applicable component types, built one component at a time:
            List<List<Type>> combinatorialNestedTypes = List.of(List.nil());
            for (Type componentType : componentTypes) {
                List<Type> applicableLeafPermittedSubtypes;
                if (componentType.tsym.isSealed()) {
                    applicableLeafPermittedSubtypes =
                            leafPermittedSubTypes(componentType.tsym,
                                                  isApplicableSubtypePredicate(componentType))
                                .stream()
                                .map(csym -> instantiatePatternType(componentType, csym))
                                .collect(List.collector());
                } else {
                    applicableLeafPermittedSubtypes = List.of(componentType);
                }
                List<List<Type>> newCombinatorialNestedTypes = List.nil();
                for (List<Type> existing : combinatorialNestedTypes) {
                    for (Type nue : applicableLeafPermittedSubtypes) {
                        newCombinatorialNestedTypes = newCombinatorialNestedTypes.prepend(existing.append(nue));
                    }
                }
                combinatorialNestedTypes = newCombinatorialNestedTypes;
            }
            Set<PatternDescription> combinatorialPatterns =
                    combinatorialNestedTypes.stream()
                                            .map(combination -> new RecordPattern(bp.type,
                                                                                  componentTypes,
                                                                                  combination.map(BindingPattern::new)
                                                                                             .toArray(PatternDescription[]::new)))
                                            .collect(Collectors.toCollection(LinkedHashSet::new));
            removeUnnecessaryPatterns(selectorType, bp, basePatterns, inMissingPatterns, combinatorialPatterns);
            CoverageResult coverageResult = computeCoverage(targetType, combinatorialPatterns, PatternEquivalence.LOOSE);
            if (!coverageResult.covered()) {
                //use the partially merged/combined patterns:
                combinatorialPatterns = coverageResult.incompletePatterns();
            }
            //combine sealed subtypes into the supertype, if all is covered,
            //but preserve more specific record types in positions where there are record patterns in the original patterns.
            //this is particularly important for the case where the sealed supertype only has one permitted type, the record:
            //the base type could be used instead of the record otherwise, which would produce a less specific missing pattern:
            Set<PatternDescription> sortedCandidates =
                    partialSortPattern(combinatorialPatterns, basePatterns, replace(inMissingPatterns, toExpand, combinatorialPatterns));
            removeUnnecessaryPatterns(selectorType, bp, basePatterns, inMissingPatterns, sortedCandidates);
            Set<PatternDescription> currentMissingPatterns =
                    replace(inMissingPatterns, toExpand, sortedCandidates);
            //recursively expand each component of the record patterns that remained:
            for (PatternDescription addedPattern : sortedCandidates) {
                if (addedPattern instanceof RecordPattern addedRP) {
                    for (int c = 0; c < addedRP.nested.length; c++) {
                        currentMissingPatterns = expandMissingPatternDescriptions(selectorType,
                                                                                  addedRP.fullComponentTypes[c],
                                                                                  addedRP.nested[c],
                                                                                  basePatterns,
                                                                                  currentMissingPatterns);
                    }
                }
            }
            return currentMissingPatterns;
        }
    }
    return inMissingPatterns;
}
/*
 * Inside every pattern in {@code in}, at any nesting depth, replaces the
 * pattern {@code what} with the patterns {@code to}, returning the rewritten set.
 */
private Set<PatternDescription> replace(Iterable<? extends PatternDescription> in,
                                        PatternDescription what,
                                        Collection<? extends PatternDescription> to) {
    Set<PatternDescription> rewritten = new HashSet<>();
    for (PatternDescription original : in) {
        Collection<? extends PatternDescription> replacement = replace(original, what, to);
        if (replacement == null) {
            //'what' does not occur inside this pattern - keep it unchanged:
            rewritten.add(original);
        } else {
            rewritten.addAll(replacement);
        }
    }
    return rewritten;
}
//where:
//null: no change
/** Replaces pattern {@code what} with patterns {@code to} inside {@code in},
 *  at any nesting depth. Returns null when {@code what} does not occur in
 *  {@code in}. Note: comparison is by reference identity ({@code ==}) -
 *  the expansion tracks specific pattern instances, not structurally equal ones.
 */
private Collection<? extends PatternDescription> replace(PatternDescription in,
                                                         PatternDescription what,
                                                         Collection<? extends PatternDescription> to) {
    if (in == what) {
        return to;
    } else if (in instanceof RecordPattern rp) {
        for (int c = 0; c < rp.nested.length; c++) {
            Collection<? extends PatternDescription> replaced = replace(rp.nested[c], what, to);
            if (replaced != null) {
                //rebuild the record pattern with the replaced component, then
                //keep replacing in case 'what' occurs in other components too:
                Set<PatternDescription> withReplaced = new HashSet<>();
                generatePatternsWithReplacedNestedPattern(rp, c, replaced, Set.of(), withReplaced::add);
                return replace(withReplaced, what, to);
            }
        }
        return null;
    } else {
        return null; //binding patterns have no children
    }
}
/* Out of "candidates" remove patterns that are not necessary to achieve exhaustiveness.
 * Note that iteration order of "candidates" is important - if the set contains
 * two patterns, out of which either, but not both, is needed to achieve exhaustiveness,
 * the first one in the iteration order will be removed.
 * Returns true iff at least one candidate was removed.
 */
private boolean removeUnnecessaryPatterns(Type selectorType,
                                          PatternDescription toExpand,
                                          Set<? extends PatternDescription> basePatterns,
                                          Set<PatternDescription> inMissingPatterns,
                                          Set<PatternDescription> candidates) {
    boolean reduced = false;
    //note: candidates is mutated in place via the iterator
    for (Iterator<PatternDescription> it = candidates.iterator(); it.hasNext(); ) {
        PatternDescription current = it.next();
        //tentatively drop 'current' from the candidate set:
        Set<PatternDescription> reducedAdded = new HashSet<>(candidates);
        reducedAdded.remove(current);
        //combine the original case patterns with the tentative missing patterns:
        Set<PatternDescription> combinedPatterns =
                Stream.concat(basePatterns.stream(),
                              replace(inMissingPatterns, toExpand, reducedAdded).stream())
                      .collect(Collectors.toSet());
        //if exhaustiveness holds without 'current', it is unnecessary:
        if (computeCoverage(selectorType, combinedPatterns, PatternEquivalence.LOOSE).covered()) {
            it.remove();
            reduced = true;
        }
    }
    return reduced;
}
/*
 * Sort patterns so that those that are preferred for removal are in front
 * of those that are preferred to remain (when there's a choice).
 * Note: this empties the passed-in {@code candidates} set as a side effect.
 */
private SequencedSet<PatternDescription> partialSortPattern(Set<PatternDescription> candidates,
                                                            Set<? extends PatternDescription> basePatterns,
                                                            Set<PatternDescription> missingPatterns) {
    SequencedSet<PatternDescription> sortedCandidates = new LinkedHashSet<>();
    //selection-style extraction: repeatedly move the "front-most" remaining
    //candidate (per shouldAppearBefore) into the ordered result:
    while (!candidates.isEmpty()) {
        PatternDescription mostSpecific = null;
        for (PatternDescription current : candidates) {
            if (mostSpecific == null ||
                shouldAppearBefore(current, mostSpecific, basePatterns, missingPatterns)) {
                mostSpecific = current;
            }
        }
        sortedCandidates.add(mostSpecific);
        candidates.remove(mostSpecific);
    }
    return sortedCandidates;
}
//where:
//true iff pd1 should appear before pd2
//false otherwise
/** Ordering used by partialSortPattern: true iff {@code pd1} should appear
 *  before {@code pd2}. Patterns preferred for removal sort first; record types
 *  that matter for the original base patterns, and supertypes, sort last.
 */
private boolean shouldAppearBefore(PatternDescription pd1,
                                   PatternDescription pd2,
                                   Set<? extends PatternDescription> basePatterns,
                                   Set<? extends PatternDescription> missingPatterns) {
    if (pd1 instanceof RecordPattern rp1 && pd2 instanceof RecordPattern rp2) {
        //compare record patterns component-wise; first decisive component wins.
        //the casts assume components are binding patterns here - NOTE(review):
        //this appears to rely on the combinatorial construction producing only
        //BindingPattern components; confirm against the caller.
        for (int c = 0; c < rp1.nested.length; c++) {
            if (shouldAppearBefore((BindingPattern) rp1.nested[c],
                                   (BindingPattern) rp2.nested[c],
                                   basePatterns,
                                   missingPatterns)) {
                return true;
            }
        }
    } else if (pd1 instanceof BindingPattern bp1 && pd2 instanceof BindingPattern bp2) {
        Type t1 = bp1.type();
        Type t2 = bp2.type();
        //a record type is "important" when the original base patterns contain a
        //record pattern at this spot; such types are preferred to remain (sort last):
        boolean t1IsImportantRecord =
                (t1.tsym.flags_field & RECORD) != 0 &&
                hasMatchingRecordPattern(basePatterns, missingPatterns, bp1);
        boolean t2IsImportantRecord =
                (t2.tsym.flags_field & RECORD) != 0 &&
                hasMatchingRecordPattern(basePatterns, missingPatterns, bp2);
        if (t1IsImportantRecord && !t2IsImportantRecord) {
            return false;
        }
        if (!t1IsImportantRecord && t2IsImportantRecord) {
            return true;
        }
        //otherwise prefer strict subtypes first:
        if (!types.isSameType(t1, t2) && types.isSubtype(t1, t2)) {
            return true;
        }
    }
    return false;
}
/*
 * Do the {@code basePatterns} have a record pattern at the place that
 * corresponds to the position of pattern {@code query} inside
 * {@code missingPatterns}? Returns false when {@code query} is not contained
 * in any of the {@code missingPatterns}.
 */
private boolean hasMatchingRecordPattern(Set<? extends PatternDescription> basePatterns,
                                         Set<? extends PatternDescription> missingPatterns,
                                         PatternDescription query) {
    PatternDescription enclosingRoot = findRootContaining(missingPatterns, query);
    return enclosingRoot != null
            && basePatternsHaveRecordPatternOnThisSpot(basePatterns, enclosingRoot, query);
}
//where:
/** Returns the first pattern in {@code rootPatterns} that contains
 *  {@code added} (at any nesting depth, by identity), or null if none does. */
private PatternDescription findRootContaining(Set<? extends PatternDescription> rootPatterns,
                                              PatternDescription added) {
    return rootPatterns.stream()
                       .filter(root -> isUnderRoot(root, added))
                       .findFirst()
                       .orElse(null);
}
/** Walks {@code rootPattern} down to the spot occupied by {@code added},
 *  simultaneously projecting {@code basePatterns} onto the corresponding
 *  nested components. At the spot itself, reports whether any of the projected
 *  base patterns is a record pattern.
 */
private boolean basePatternsHaveRecordPatternOnThisSpot(Set<? extends PatternDescription> basePatterns,
                                                        PatternDescription rootPattern,
                                                        PatternDescription added) {
    if (rootPattern == added) {
        //reached the spot: is there a record pattern among the projected base patterns?
        return basePatterns.stream().anyMatch(pd -> pd instanceof RecordPattern);
    }
    if (!(rootPattern instanceof RecordPattern rootPatternRecord)) {
        //a binding pattern cannot contain 'added' any deeper
        return false;
    }
    //find which component of the record pattern contains 'added':
    int index = -1;
    for (int c = 0; c < rootPatternRecord.nested.length; c++) {
        if (isUnderRoot(rootPatternRecord.nested[c], added)) {
            index = c;
            break;
        }
    }
    // 'index' must be one of rootPatternRecord.nested; if not, `isUnderRoot` is inconsistent.
    Assert.check(index != (-1));
    int indexFin = index;
    //project base patterns of the same record type onto that component:
    Set<PatternDescription> filteredBasePatterns =
            basePatterns.stream()
                        .filter(pd -> pd instanceof RecordPattern)
                        .map(rp -> (RecordPattern) rp)
                        .filter(rp -> types.isSameType(rp.recordType(), rootPatternRecord.recordType()))
                        .map(rp -> rp.nested[indexFin])
                        .collect(Collectors.toSet());
    return basePatternsHaveRecordPatternOnThisSpot(filteredBasePatterns, rootPatternRecord.nested[index], added);
}
/** True iff {@code searchFor} occurs within {@code root} (including root
 *  itself), at any nesting depth; comparison is by reference identity. */
private boolean isUnderRoot(PatternDescription root, PatternDescription searchFor) {
    if (root == searchFor) {
        return true;
    }
    if (root instanceof RecordPattern rp) {
        //anyMatch short-circuits, as the explicit loop would
        return Arrays.stream(rp.nested)
                     .anyMatch(component -> isUnderRoot(component, searchFor));
    }
    return false;
}
/*
 * Using {@code basePattern} as a template, creates one new {@code
 * RecordPattern} per element of {@code updatedNestedPatterns}: the component
 * at index {@code replaceComponent} is replaced by that element, all other
 * components are kept. Each resulting pattern (carrying the given {@code
 * sourcePatterns}) is handed to {@code target}.
 */
private void generatePatternsWithReplacedNestedPattern(RecordPattern basePattern,
                                                       int replaceComponent,
                                                       Iterable<? extends PatternDescription> updatedNestedPatterns,
                                                       Set<PatternDescription> sourcePatterns,
                                                       Consumer<RecordPattern> target) {
    for (PatternDescription replacement : updatedNestedPatterns) {
        //copy the components so the template stays untouched:
        PatternDescription[] components = basePattern.nested.clone();
        components[replaceComponent] = replacement;
        target.accept(new RecordPattern(basePattern.recordType(),
                                        basePattern.fullComponentTypes(),
                                        components,
                                        sourcePatterns));
    }
}
/* For a given record type, returns the record's component types, instantiated
 * according to the exact record type (i.e. as member types of recordType).
 */
private Type[] instantiatedComponentTypes(Type recordType) {
    return ((ClassSymbol) recordType.tsym).getRecordComponents()
            .map(component -> types.memberType(recordType, component))
            .toArray(Type[]::new);
}
/* The strictness of determining the equivalence of patterns, used in
 * nestedComponentsEquivalent.
 */
private enum PatternEquivalence {
    //nested patterns must match exactly (used for the actual exhaustiveness check)
    STRICT,
    //a binding pattern of the same type may stand in place of a record pattern;
    //only used when computing the missing patterns for error reporting
    LOOSE;
}
/** Thrown when the number of base checks performed while computing missing
 *  patterns exceeds {@code maxBaseChecks}. Carries the best set of missing
 *  patterns computed so far (null when nothing was computed yet).
 */
protected static class TooManyChecksException extends RuntimeException {
    private static final long serialVersionUID = 0L;
    //JLS-recommended modifier order (was: private transient final);
    //transient: PatternDescriptions are compiler-internal and not serializable
    private final transient Set<PatternDescription> missingPatterns;
    public TooManyChecksException(Set<PatternDescription> missingPatterns) {
        //no message, no cause, no suppression, no stack trace - control-flow only:
        super(null, null, false, false);
        this.missingPatterns = missingPatterns;
    }
}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -50,9 +50,15 @@ import static com.sun.tools.javac.code.Flags.BLOCK;
import static com.sun.tools.javac.code.Kinds.Kind.*;
import static com.sun.tools.javac.code.TypeTag.BOOLEAN;
import static com.sun.tools.javac.code.TypeTag.VOID;
import com.sun.tools.javac.comp.ExhaustivenessComputer.BindingPattern;
import com.sun.tools.javac.comp.ExhaustivenessComputer.EnumConstantPattern;
import com.sun.tools.javac.comp.ExhaustivenessComputer.ExhaustivenessResult;
import com.sun.tools.javac.comp.ExhaustivenessComputer.PatternDescription;
import com.sun.tools.javac.comp.ExhaustivenessComputer.RecordPattern;
import com.sun.tools.javac.resources.CompilerProperties.Fragments;
import static com.sun.tools.javac.tree.JCTree.Tag.*;
import com.sun.tools.javac.util.JCDiagnostic.Fragment;
import java.util.Arrays;
/** This pass implements dataflow analysis for Java programs though
* different AST visitor steps. Liveness analysis (see AliveAnalyzer) checks that
@ -696,9 +702,18 @@ public class Flow {
tree.isExhaustive = tree.hasUnconditionalPattern ||
TreeInfo.isErrorEnumSwitch(tree.selector, tree.cases);
if (exhaustiveSwitch) {
tree.isExhaustive |= exhaustiveness.exhausts(tree.selector, tree.cases);
if (!tree.isExhaustive) {
log.error(tree, Errors.NotExhaustiveStatement);
ExhaustivenessResult exhaustivenessResult = exhaustiveness.exhausts(tree.selector, tree.cases);
tree.isExhaustive = exhaustivenessResult.exhaustive();
if (!tree.isExhaustive) {
if (exhaustivenessResult.notExhaustiveDetails().isEmpty()) {
log.error(tree, Errors.NotExhaustiveStatement);
} else {
logNotExhaustiveError(tree.pos(), exhaustivenessResult, Errors.NotExhaustiveStatementDetails);
}
}
}
}
if (!tree.hasUnconditionalPattern && !exhaustiveSwitch) {
@ -735,16 +750,54 @@ public class Flow {
TreeInfo.isErrorEnumSwitch(tree.selector, tree.cases)) {
tree.isExhaustive = true;
} else {
tree.isExhaustive = exhaustiveness.exhausts(tree.selector, tree.cases);
ExhaustivenessResult exhaustivenessResult = exhaustiveness.exhausts(tree.selector, tree.cases);
tree.isExhaustive = exhaustivenessResult.exhaustive();
if (!tree.isExhaustive) {
if (exhaustivenessResult.notExhaustiveDetails().isEmpty()) {
log.error(tree, Errors.NotExhaustive);
} else {
logNotExhaustiveError(tree.pos(), exhaustivenessResult, Errors.NotExhaustiveDetails);
}
}
}
if (!tree.isExhaustive) {
log.error(tree, Errors.NotExhaustive);
}
alive = prevAlive;
alive = alive.or(resolveYields(tree, prevPendingExits));
}
private void logNotExhaustiveError(DiagnosticPosition pos,
ExhaustivenessResult exhaustivenessResult,
Error errorKey) {
List<JCDiagnostic> details =
exhaustivenessResult.notExhaustiveDetails()
.stream()
.map(this::patternToDiagnostic)
.sorted((d1, d2) -> d1.toString()
.compareTo(d2.toString()))
.collect(List.collector());
JCDiagnostic main = diags.error(null, log.currentSource(), pos, errorKey);
JCDiagnostic d = new JCDiagnostic.MultilineDiagnostic(main, details);
log.report(d);
}
private JCDiagnostic patternToDiagnostic(PatternDescription desc) {
Type patternType = types.erasure(desc.type());
return diags.fragment(switch (desc) {
case BindingPattern _ ->
Fragments.BindingPattern(patternType);
case RecordPattern rp ->
Fragments.RecordPattern(patternType,
Arrays.stream(rp.nested())
.map(this::patternToDiagnostic)
.toList());
case EnumConstantPattern ep ->
Fragments.EnumConstantPattern(patternType,
ep.enumConstant());
});
}
public void visitTry(JCTry tree) {
ListBuffer<PendingExit> prevPendingExits = pendingExits;
pendingExits = new ListBuffer<>();

View File

@ -1,5 +1,5 @@
#
# Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@ -1476,6 +1476,26 @@ compiler.err.not.exhaustive=\
compiler.err.not.exhaustive.statement=\
the switch statement does not cover all possible input values
compiler.err.not.exhaustive.details=\
the switch expression does not cover all possible input values\n\
missing patterns:
compiler.err.not.exhaustive.statement.details=\
the switch statement does not cover all possible input values\n\
missing patterns:
# 0: type
compiler.misc.binding.pattern=\
{0} _
# 0: type, 1: list of diagnostic
compiler.misc.record.pattern=\
{0}({1})
# 0: type, 1: name
compiler.misc.enum.constant.pattern=\
{0}.{1}
compiler.err.initializer.must.be.able.to.complete.normally=\
initializer must be able to complete normally

View File

@ -0,0 +1,33 @@
/*
* Copyright (c) 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
// key: compiler.err.not.exhaustive.details
// key: compiler.misc.binding.pattern
//a switch expression over Object covering only String is not exhaustive;
//per the keys above, the error is expected to include the missing pattern
//rendered as a binding pattern diagnostic
class BindingPattern {
    int t(Object o) {
        return switch (o) {
            case String s -> 0;
        };
    }
}

View File

@ -0,0 +1,37 @@
/*
* Copyright (c) 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
// key: compiler.err.not.exhaustive.details
// key: compiler.misc.enum.constant.pattern
//covering R and E.A leaves the enum constant E.B uncovered; per the keys
//above, the error is expected to include an enum-constant missing-pattern
//diagnostic
class NotExhaustiveDetails {
    int t(I i) {
        return switch (i) {
            case R r -> -1;
            case E.A -> -1;
        };
    }
    sealed interface I {}
    enum E implements I {A, B}
    record R(E e) implements I {}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -22,6 +22,7 @@
*/
// key: compiler.err.not.exhaustive
// options: -XDexhaustivityMaxBaseChecks=0
class NotExhaustive {
int t(int i) {

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2021, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -22,6 +22,7 @@
*/
// key: compiler.err.not.exhaustive.statement
// options: -XDexhaustivityMaxBaseChecks=0
class NotExhaustive {
void t(Object o) {

View File

@ -0,0 +1,37 @@
/*
* Copyright (c) 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
// key: compiler.err.not.exhaustive.statement.details
// key: compiler.misc.record.pattern
//covering only R(C1 _) leaves R(C2 _) uncovered; per the keys above, the
//error is expected to include a record missing-pattern diagnostic
class RecordPattern {
    void t(R r) {
        switch (r) {
            case R(C1 _) -> {}
        };
    }
    sealed interface I {}
    record C1() implements I {}
    record C2() implements I {}
    record R(I i) {}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -2523,6 +2523,7 @@ public class Exhaustiveness extends TestRunner {
"-Xlint:-preview",
"--class-path", libClasses.toString(),
"-XDshould-stop.at=FLOW",
"-XDexhaustivityMaxBaseChecks=0",
stopAtFlow ? "-XDshould-stop.ifNoError=FLOW"
: "-XDnoop")
.outdir(classes)

View File

@ -0,0 +1,593 @@
/*
* Copyright (c) 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* @test
* @bug 8367530
* @summary Check enhanced exhaustiveness errors
* @library /tools/lib
* @modules jdk.compiler/com.sun.tools.javac.api
* jdk.compiler/com.sun.tools.javac.main
* jdk.compiler/com.sun.tools.javac.util
* @build toolbox.ToolBox toolbox.JavacTask
* @run main ExhaustivenessConvenientErrors
*/
import com.sun.tools.javac.api.ClientCodeWrapper.DiagnosticSourceUnwrapper;
import com.sun.tools.javac.util.JCDiagnostic;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import toolbox.JavacTask;
import toolbox.Task;
import toolbox.TestRunner;
import toolbox.ToolBox;
public class ExhaustivenessConvenientErrors extends TestRunner {
ToolBox tb;
public static void main(String... args) throws Exception {
new ExhaustivenessConvenientErrors().runTests();
}
ExhaustivenessConvenientErrors() {
super(System.err);
tb = new ToolBox();
}
public void runTests() throws Exception {
runTests(m -> new Object[] { Paths.get(m.getName()) });
}
@Test
public void testExhaustiveSealedClasses(Path base) throws Exception {
doTest(base,
new String[]{"""
package lib;
public sealed interface S permits A, B {}
""",
"""
package lib;
public final class A implements S {}
""",
"""
package lib;
public final class B implements S {}
"""},
"""
package test;
import lib.*;
public class Test {
private int test(S obj) {
return switch (obj) {
case A a -> 0;
};
}
}
""",
"lib.B _");
}
@Test
public void testExhaustiveSealedClassesTransitive(Path base) throws Exception {
doTest(base,
new String[]{"""
package lib;
public sealed interface S1 permits S2, A {}
""",
"""
package lib;
public sealed interface S2 extends S1 permits S3, B {}
""",
"""
package lib;
public sealed interface S3 extends S2 permits C, D {}
""",
"""
package lib;
public final class A implements S1 {}
""",
"""
package lib;
public final class B implements S2 {}
""",
"""
package lib;
public final class C implements S3 {}
""",
"""
package lib;
public final class D implements S3 {}
"""},
"""
package test;
import lib.*;
public class Test {
private int test(S1 obj) {
return switch (obj) {
case A a -> 0;
case B a -> 0;
case D a -> 0;
};
}
}
""",
"lib.C _");
}
@Test
public void testTrivialRecord(Path base) throws Exception {
doTest(base,
new String[]{"""
package lib;
public sealed interface S permits A, B {}
""",
"""
package lib;
public final class A implements S {}
""",
"""
package lib;
public final class B implements S {}
""",
"""
package lib;
public record R(S s) {}
"""},
"""
package test;
import lib.*;
public class Test {
private int test(R r) {
return switch (r) {
case R(A a) -> 0;
};
}
}
""",
"lib.R(lib.B _)");
}
@Test
public void testNonNestedRecord(Path base) throws Exception {
doTest(base,
new String[]{"""
package lib;
public sealed interface S permits A, B {}
""",
"""
package lib;
public final class A implements S {}
""",
"""
package lib;
public final class B implements S {}
""",
"""
package lib;
public record R(S s1, S s2) {}
"""},
"""
package test;
import lib.*;
public class Test {
private int test(R r) {
return switch (r) {
case R(A a, B b) -> 0;
case R(B b, A a) -> 0;
};
}
}
""",
"lib.R(lib.A _,lib.A _)",
"lib.R(lib.B _,lib.B _)");
}
@Test
public void testComplex1(Path base) throws Exception {
doTest(base,
new String[0],
"""
package test;
import lib.*;
public class Test {
private int test(Root r) {
return switch (r) {
case Root(R1 _, _, _) -> 0;
};
}
sealed interface Base {}
record R1() implements Base {}
record R2() implements Base {}
record R3(Base b1, Base b2) implements Base {}
record Root(Base b1, Base b2, Base b3) {}
}
""",
"test.Test.Root(test.Test.R2 _,test.Test.Base _,test.Test.Base _)",
"test.Test.Root(test.Test.R3 _,test.Test.Base _,test.Test.Base _)");
}
@Test
public void testComplex2(Path base) throws Exception {
doTest(base,
new String[0],
"""
package test;
import lib.*;
public class Test {
private int test(Root r) {
return switch (r) {
case Root(R1 _, _, _) -> 0;
case Root(R2 _, R1 _, _) -> 0;
case Root(R2 _, R2 _, R1 _) -> 0;
case Root(R2 _, R2(R1 _, R1 _), R2(R1 _, R1 _)) -> 0;
case Root(R2 _, R2(R1 _, R1 _), R2(R1 _, R2 _)) -> 0;
case Root(R2 _, R2(R1 _, R1 _), R2(R2 _, R1 _)) -> 0;
case Root(R2 _, R2(R1 _, R1 _), R2(R2 _, R2 _)) -> 0;
case Root(R2 _, R2(R1 _, R2 _), R2(R1 _, R1 _)) -> 0;
case Root(R2 _, R2(R1 _, R2 _), R2(R1 _, R2 _)) -> 0;
case Root(R2 _, R2(R1 _, R2 _), R2(R2 _, R1 _)) -> 0;
case Root(R2 _, R2(R1 _, R2 _), R2(R2 _, R2 _)) -> 0;
case Root(R2 _, R2(R2 _, R1 _), R2(R1 _, R1 _)) -> 0;
case Root(R2 _, R2(R2 _, R1 _), R2(R1 _, R2 _)) -> 0;
case Root(R2 _, R2(R2 _, R1 _), R2(R2 _, R1 _)) -> 0;
case Root(R2 _, R2(R2 _, R1 _), R2(R2 _, R2 _)) -> 0;
case Root(R2 _, R2(R2 _, R2 _), R2(R1 _, R1 _)) -> 0;
case Root(R2 _, R2(R2 _, R2 _), R2(R1 _, R2 _)) -> 0;
case Root(R2 _, R2(R2 _, R2 _), R2(R2 _, R1 _)) -> 0;
// case Root(R2 _, R2(R2 _, R2 _), R2(R2 _, R2 _)) -> 0;
};
}
sealed interface Base {}
record R1() implements Base {}
record R2(Base b1, Base b2) implements Base {}
record Root(Base b1, Base b2, Base b3) {}
}
""",
"test.Test.Root(test.Test.R2 _,test.Test.R2(test.Test.R2 _,test.Test.R2 _),test.Test.R2(test.Test.R2 _,test.Test.R2 _))");
}
@Test
public void testComplex3(Path base) throws Exception {
doTest(base,
new String[0],
"""
package test;
public class Test {
private int test(Triple p) {
return switch (p) {
case Triple(B _, _, _) -> 0;
case Triple(_, A _, _) -> 0;
case Triple(_, _, A _) -> 0;
case Triple(A p, C(Nested _, NestedBaseA _), _) -> 0;
case Triple(A p, C(Nested _, NestedBaseB _), C(Nested _, NestedBaseA _)) -> 0;
case Triple(A p, C(Nested _, NestedBaseB _), C(Nested _, NestedBaseB _)) -> 0;
case Triple(A p, C(Nested _, NestedBaseB _), C(Nested _, NestedBaseC _)) -> 0;
case Triple(A p, C(Nested _, NestedBaseC _), C(Nested _, NestedBaseA _)) -> 0;
case Triple(A p, C(Nested _, NestedBaseC _), C(Nested _, NestedBaseB _)) -> 0;
// case Path(A p, C(Nested _, NestedBaseC _), C(Nested _, NestedBaseC _)) -> 0;
};
}
record Triple(Base c1, Base c2, Base c3) {}
sealed interface Base permits A, B {}
record A(boolean key) implements Base {
}
sealed interface B extends Base {}
record C(Nested n, NestedBase b) implements B {}
record Nested() {}
sealed interface NestedBase {}
record NestedBaseA() implements NestedBase {}
record NestedBaseB() implements NestedBase {}
record NestedBaseC() implements NestedBase {}
}
""",
"test.Test.Triple(test.Test.A _,test.Test.C(test.Test.Nested _,test.Test.NestedBaseC _),test.Test.C(test.Test.Nested _,test.Test.NestedBaseC _))");
}
@Test
public void testComplex4(Path base) throws Exception {
doTest(base,
new String[0],
"""
package test;
import lib.*;
public class Test {
private int test(Root r) {
return switch (r) {
case Root(R1 _, _, _) -> 0;
case Root(R2 _, R1 _, _) -> 0;
case Root(R2 _, R2 _, R1 _) -> 0;
case Root(R2 _, R2(R1 _, R1 _), R2(R1 _, R1 _)) -> 0;
case Root(R2 _, R2(R1 _, R1 _), R2(R1 _, R2 _)) -> 0;
case Root(R2 _, R2(R1 _, R1 _), R2(R2 _, R1 _)) -> 0;
case Root(R2 _, R2(R1 _, R1 _), R2(R2 _, R2 _)) -> 0;
case Root(R2 _, R2(R1 _, R2 _), R2(R1 _, R1 _)) -> 0;
case Root(R2 _, R2(R1 _, R2 _), R2(R1 _, R2 _)) -> 0;
// case Root(R2 _, R2(R1 _, R2 _), R2(R2 _, R1 _)) -> 0;
case Root(R2 _, R2(R1 _, R2 _), R2(R2 _, R2 _)) -> 0;
case Root(R2 _, R2(R2 _, R1 _), R2(R1 _, R1 _)) -> 0;
case Root(R2 _, R2(R2 _, R1 _), R2(R1 _, R2 _)) -> 0;
case Root(R2 _, R2(R2 _, R1 _), R2(R2 _, R1 _)) -> 0;
case Root(R2 _, R2(R2 _, R1 _), R2(R2 _, R2 _)) -> 0;
case Root(R2 _, R2(R2 _, R2 _), R2(R1 _, R1 _)) -> 0;
case Root(R2 _, R2(R2 _, R2 _), R2(R1 _, R2 _)) -> 0;
case Root(R2 _, R2(R2 _, R2 _), R2(R2 _, R1 _)) -> 0;
// case Root(R2 _, R2(R2 _, R2 _), R2(R2 _, R2 _)) -> 0;
};
}
sealed interface Base {}
record R1() implements Base {}
record R2(Base b1, Base b2) implements Base {}
record Root(Base b1, Base b2, Base b3) {}
}
""",
"test.Test.Root(test.Test.R2 _,test.Test.R2(test.Test.Base _,test.Test.R2 _),test.Test.R2(test.Test.R2 _,test.Test.Base _))");
               //ideally, the result would be as follows, but it is difficult to split Base in two distinct places:
// "test.Test.Root(test.Test.R2 _,test.Test.R2(test.Test.R1 _,test.Test.R2 _),test.Test.R2(test.Test.R2 _,test.Test.R1 _))",
// "test.Test.Root(test.Test.R2 _,test.Test.R2(test.Test.R2 _,test.Test.R2 _),test.Test.R2(test.Test.R2 _,test.Test.R2 _))");
}
@Test
public void testComplex5(Path base) throws Exception {
doTest(base,
new String[0],
"""
package test;
public class Test {
private int test(Triple p) {
return switch (p) {
case Triple(B _, _, _) -> 0;
case Triple(_, A _, _) -> 0;
case Triple(_, _, A _) -> 0;
// case Triple(A _, C(Nested _, NestedBaseA _), _) -> 0;
case Triple(A _, C(Nested _, NestedBaseB _), C(Nested _, NestedBaseA _)) -> 0;
case Triple(A _, C(Nested _, NestedBaseB _), C(Nested _, NestedBaseB _)) -> 0;
case Triple(A _, C(Nested _, NestedBaseB _), C(Nested _, NestedBaseC _)) -> 0;
case Triple(A _, C(Nested _, NestedBaseC _), C(Nested _, NestedBaseA _)) -> 0;
case Triple(A _, C(Nested _, NestedBaseC _), C(Nested _, NestedBaseB _)) -> 0;
// case Path(A _, C(Nested _, NestedBaseC _), C(Nested _, NestedBaseC _)) -> 0;
};
}
record Triple(Base c1, Base c2, Base c3) {}
sealed interface Base permits A, B {}
record A(boolean key) implements Base {
}
sealed interface B extends Base {}
record C(Nested n, NestedBase b) implements B {}
record Nested() {}
sealed interface NestedBase {}
record NestedBaseA() implements NestedBase {}
record NestedBaseB() implements NestedBase {}
record NestedBaseC() implements NestedBase {}
}
""",
"test.Test.Triple(test.Test.A _,test.Test.C(test.Test.Nested _,test.Test.NestedBaseA _),test.Test.C _)",
//the following could be:
//test.Test.Triple(test.Test.A _,test.Test.C(test.Test.Nested _,test.Test.NestedBaseC _),test.Test.C(test.Test.Nested _,test.Test.NestedBaseC _))
"test.Test.Triple(test.Test.A _,test.Test.C(test.Test.Nested _,test.Test.NestedBaseC _),test.Test.C _)");
}
@Test
public void testNoInfiniteRecursion(Path base) throws Exception {
doTest(base,
new String[0],
"""
package test;
public class Test {
private int test(R r) {
return switch (r) {
case R(_, _, R(_, _, _, _), String s) -> 0;
case R(_, _, R(_, _, _, String str), _) -> 0;
};
}
}
public record R(R r1, R r2, R r3, Object o) {}
""",
"test.R(test.R _,test.R _,test.R(test.R _,test.R _,test.R _,java.lang.Object _),java.lang.Object _)");
}
@Test
public void testEnum(Path base) throws Exception {
doTest(base,
new String[0],
"""
package test;
public class Test {
private int test(I i) {
return switch (i) {
case E.A -> 0;
case C _ -> 1;
};
}
sealed interface I {}
enum E implements I {A, B}
final class C implements I {}
}
public record R(R r1, R r2, R r3, Object o) {}
""",
"test.Test.E.B");
doTest(base,
new String[0],
"""
package test;
public class Test {
private int test(I i) {
return switch (i) {
case C _ -> 1;
};
}
sealed interface I {}
enum E implements I {A, B}
final class C implements I {}
}
public record R(R r1, R r2, R r3, Object o) {}
""",
"test.Test.E _");
}
@Test
public void testInstantiateComponentTypes(Path base) throws Exception {
doTest(base,
new String[0],
"""
package test;
public class Test {
private int test(Pair<Base<Base>> p) {
return switch (p) {
case Pair(A(A(_)) -> 0;
case Pair(A(B(_)) -> 0;
case Pair(B(A(_)) -> 0;
};
}
record Pair<T>(T c) {}
sealed interface Base<T> permits A, B {}
record A<T>(T c) implements Base<T> {}
record B<T>(T c) implements Base<T> {}
}
""",
"test.Test.Pair(test.Test.B(test.Test.B _))");
}
@Test
public void testNeedToExpandIfRecordExists(Path base) throws Exception {
doTest(base,
new String[0],
"""
package test;
class Test {
sealed interface A { }
record B() implements A { }
record C(A a) implements A { }
void test(A a) {
switch (a) {
case C(B _) -> throw null;
}
}
} """,
"test.Test.B _",
"test.Test.C(test.Test.C _)");
}
@Test
public void testComplex6(Path base) throws Exception {
doTest(base,
new String[0],
"""
public class Test {
sealed interface Base {}
record NoOp() implements Base {}
record Const() implements Base {}
record Pair(Base n1,
Base b2) implements Base {}
int t(Base b) {
return switch (b) {
case NoOp _ -> 0;
case Const _ -> 0;
case Pair(NoOp _, _) -> 0;
case Pair(Const _, _) -> 0;
case Pair(Pair _, NoOp _) -> 0;
case Pair(Pair _, Const _) -> 0;
case Pair(Pair _, Pair(NoOp _, _)) -> 0;
case Pair(Pair _, Pair(Const _, _)) -> 0;
case Pair(Pair _, Pair(Pair(NoOp _, _), _)) -> 0;
case Pair(Pair _, Pair(Pair(Const _, _), _)) -> 0;
case Pair(Pair(NoOp _, _), Pair(Pair(Pair _, _), _)) -> 0;
case Pair(Pair(Const _, _), Pair(Pair(Pair _, _), _)) -> 0;
// case Pair(Pair(Pair _, _), Pair(Pair(Pair _, _), _)) -> 0;
};
}
}
""",
"Test.Pair(Test.Pair(Test.Pair _,Test.Base _),Test.Pair(Test.Pair(Test.Pair _,Test.Base _),Test.Base _))");
}
private void doTest(Path base, String[] libraryCode, String testCode, String... expectedMissingPatterns) throws IOException {
Path current = base.resolve(".");
Path libClasses = current.resolve("libClasses");
Files.createDirectories(libClasses);
if (libraryCode.length != 0) {
Path libSrc = current.resolve("lib-src");
for (String code : libraryCode) {
tb.writeJavaFiles(libSrc, code);
}
new JavacTask(tb)
.outdir(libClasses)
.files(tb.findJavaFiles(libSrc))
.run();
}
Path src = current.resolve("src");
tb.writeJavaFiles(src, testCode);
Path classes = current.resolve("libClasses");
Files.createDirectories(libClasses);
Set<String> missingPatterns = new HashSet<>();
new JavacTask(tb)
.options("-XDrawDiagnostics",
"--class-path", libClasses.toString(),
"-XDshould-stop.at=FLOW",
"-XDshould-stop.ifNoError=FLOW",
"-XDexhaustivityMaxBaseChecks=" + Long.MAX_VALUE) //never give up
.outdir(classes)
.files(tb.findJavaFiles(src))
.diagnosticListener(d -> {
if ("compiler.err.not.exhaustive.details".equals(d.getCode()) ||
"compiler.err.not.exhaustive.statement.details".equals(d.getCode())) {
if (d instanceof DiagnosticSourceUnwrapper uw) {
d = uw.d;
}
if (d instanceof JCDiagnostic.MultilineDiagnostic diag) {
diag.getSubdiagnostics()
.stream()
.map(fragment -> fragment.toString())
.forEach(missingPatterns::add);
}
}
})
.run(Task.Expect.FAIL)
.writeAll();
Set<String> expectedPatterns = new HashSet<>(List.of(expectedMissingPatterns));
if (!expectedPatterns.equals(missingPatterns)) {
throw new AssertionError("Incorrect errors, expected: " + expectedPatterns +
", actual: " + missingPatterns);
}
}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2023, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -103,16 +103,19 @@ public class PrimitiveInstanceOfComboTest extends ComboInstance<PrimitiveInstanc
ComboTask task1 = newCompilationTask()
.withSourceFromTemplate(test1.replace("#{TYPE1}", type1.code).replace("#{TYPE2}", type2.code))
.withOption("--enable-preview")
.withOption("-XDexhaustivityMaxBaseChecks=0")
.withOption("-source").withOption(JAVA_VERSION);
ComboTask task2 = newCompilationTask()
.withSourceFromTemplate(test2.replace("#{TYPE1}", type1.code).replace("#{TYPE2}", type2.code))
.withOption("--enable-preview")
.withOption("-XDexhaustivityMaxBaseChecks=0")
.withOption("-source").withOption(JAVA_VERSION);
ComboTask task3 = newCompilationTask()
.withSourceFromTemplate(test3.replace("#{TYPE1}", type1.code).replace("#{TYPE2}", type2.code))
.withOption("--enable-preview")
.withOption("-XDexhaustivityMaxBaseChecks=0")
.withOption("-source").withOption(JAVA_VERSION);
task1.generate(result1 -> {

View File

@ -2,7 +2,7 @@
* @test /nodynamiccopyright/
* @summary Retain exhaustiveness properties of switches with a constant selector
* @enablePreview
* @compile/fail/ref=PrimitivePatternsSwitchConstants.out -XDrawDiagnostics -XDshould-stop.at=FLOW PrimitivePatternsSwitchConstants.java
* @compile/fail/ref=PrimitivePatternsSwitchConstants.out -XDrawDiagnostics -XDshould-stop.at=FLOW -XDexhaustivityMaxBaseChecks=0 PrimitivePatternsSwitchConstants.java
*/
public class PrimitivePatternsSwitchConstants {
void testConstExpressions() {

View File

@ -3,7 +3,7 @@
* @bug 8304487 8325653 8332463
* @summary Compiler Implementation for Primitive types in patterns, instanceof, and switch (Preview)
* @enablePreview
* @compile/fail/ref=PrimitivePatternsSwitchErrors.out -XDrawDiagnostics -XDshould-stop.at=FLOW PrimitivePatternsSwitchErrors.java
* @compile/fail/ref=PrimitivePatternsSwitchErrors.out -XDrawDiagnostics -XDshould-stop.at=FLOW -XDexhaustivityMaxBaseChecks=0 PrimitivePatternsSwitchErrors.java
*/
public class PrimitivePatternsSwitchErrors {
record R_int(int x) {}

View File

@ -2,7 +2,7 @@
* @test /nodynamiccopyright/
* @bug 8262891 8269146 8269113 8348928
* @summary Verify errors related to pattern switches.
* @compile/fail/ref=SwitchErrors.out -XDrawDiagnostics -XDshould-stop.at=FLOW SwitchErrors.java
* @compile/fail/ref=SwitchErrors.out -XDrawDiagnostics -XDshould-stop.at=FLOW -XDexhaustivityMaxBaseChecks=0 SwitchErrors.java
*/
public class SwitchErrors {

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2023, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -26,9 +26,9 @@
* @bug 8318913
 * @summary Verify no error is reported when compiling a class whose permitted types are not exported
* @modules jdk.compiler
* @compile/fail/ref=NonExportedPermittedTypes.out -XDrawDiagnostics NonExportedPermittedTypes.java
* @compile/fail/ref=NonExportedPermittedTypes.out --release 21 -XDrawDiagnostics NonExportedPermittedTypes.java
* @compile/fail/ref=NonExportedPermittedTypes.out --release ${jdk.version} -XDrawDiagnostics NonExportedPermittedTypes.java
* @compile/fail/ref=NonExportedPermittedTypes.out -XDrawDiagnostics -XDexhaustivityMaxBaseChecks=0 NonExportedPermittedTypes.java
* @compile/fail/ref=NonExportedPermittedTypes.out --release 21 -XDrawDiagnostics -XDexhaustivityMaxBaseChecks=0 NonExportedPermittedTypes.java
* @compile/fail/ref=NonExportedPermittedTypes.out --release ${jdk.version} -XDrawDiagnostics -XDexhaustivityMaxBaseChecks=0 NonExportedPermittedTypes.java
*/

View File

@ -2,7 +2,7 @@
* @test /nodynamiccopyright/
* @bug 8206986
* @summary Verify behavior of not exhaustive switch expressions.
* @compile/fail/ref=ExpressionSwitchNotExhaustive.out -XDrawDiagnostics ExpressionSwitchNotExhaustive.java
* @compile/fail/ref=ExpressionSwitchNotExhaustive.out -XDrawDiagnostics -XDexhaustivityMaxBaseChecks=0 ExpressionSwitchNotExhaustive.java
*/
public class ExpressionSwitchNotExhaustive {